# Merged Python Files

#### address.py

import logging
logger = logging.getLogger(__name__)

import struct

import bitcoin

from counterpartylib.lib import config
from counterpartylib.lib import script


def address_scriptpubkey(address):
    try:
        bech32 = bitcoin.bech32.CBech32Data(address)
        return b''.join([b'\x00\x14', bech32.to_bytes()])
    except Exception as e:
        bs58 = bitcoin.base58.decode(address)[1:-4]
        return b''.join([b'\x76\xa9\x14', bs58, b'\x88\xac'])


def pack(address):
    """
    Converts a base58 bitcoin address into a 21 byte bytes object
    """
    from .util import enabled   # Here to account for test mock changes

    if enabled('segwit_support'):
        try:
            bech32 = bitcoin.bech32.CBech32Data(address)
            witver = (0x80 + bech32.witver).to_bytes(1, byteorder='big')  # mark the first byte for segwit
            witprog = bech32.to_bytes()
            if len(witprog) > 20:
                raise Exception('p2wsh still not supported for sending')
            return b''.join([witver, witprog])
        except Exception as ne:
            try:
                script.validate(address)  # This will check if the address is valid
                short_address_bytes = bitcoin.base58.decode(address)[:-4]
                return short_address_bytes
            except bitcoin.base58.InvalidBase58Error as e:
                raise e
            except Exception as e:
                raise Exception('The address {} is not a valid bitcoin address ({})'.format(
                    address, 'testnet' if config.TESTNET or config.REGTEST else 'mainnet'))
    else:
        try:
            short_address_bytes = bitcoin.base58.decode(address)[:-4]
            return short_address_bytes
        except bitcoin.base58.InvalidBase58Error as e:
            raise e


# Returns the full base58 (or bech32) address encoded in the packed bytes.
def unpack(short_address_bytes):
    """
    Converts a 21 byte prefix and public key hash into a full base58 bitcoin address
    """
    from .util import enabled   # Here to account for test mock changes

    if enabled('segwit_support') and short_address_bytes[0] >= 0x80 and short_address_bytes[0] <= 0x8F:
        # we have a segwit address here
        witver = short_address_bytes[0] - 0x80
        witprog = short_address_bytes[1:]
        return str(bitcoin.bech32.CBech32Data.from_bytes(witver, witprog))
    else:
        check = bitcoin.core.Hash(short_address_bytes)[0:4]
        return bitcoin.base58.encode(short_address_bytes + check)


#### api.py

#! /usr/bin/python3

"""
The database connections are read‐only, so SQL injection attacks can’t be a problem.
"""

import sys
import os
import threading
import decimal
import time
import json
import re
import requests
import collections
import logging
import traceback
logger = logging.getLogger(__name__)
from logging import handlers as logging_handlers
D = decimal.Decimal
import binascii
import math
import struct
import apsw
import flask
from flask_httpauth import HTTPBasicAuth
import jsonrpc
from jsonrpc import dispatcher
from jsonrpc.exceptions import JSONRPCDispatchException
import inspect
from xmltodict import unparse as serialize_to_xml

from counterpartylib.lib import config
from counterpartylib.lib import exceptions
from counterpartylib.lib import util
from counterpartylib.lib import check
from counterpartylib.lib import backend
from counterpartylib.lib import database
from counterpartylib.lib import transaction
from counterpartylib.lib import blocks
from counterpartylib.lib import script
from counterpartylib.lib import message_type
from counterpartylib.lib.messages import send
from counterpartylib.lib.messages.versions import enhanced_send
from counterpartylib.lib.messages import order
from counterpartylib.lib.messages import btcpay
from counterpartylib.lib.messages import issuance
from counterpartylib.lib.messages import broadcast
from counterpartylib.lib.messages import bet
from counterpartylib.lib.messages import dividend
from counterpartylib.lib.messages import burn
from counterpartylib.lib.messages import destroy
from counterpartylib.lib.messages import cancel
from counterpartylib.lib.messages import rps
from counterpartylib.lib.messages import rpsresolve
from counterpartylib.lib.messages import sweep
from counterpartylib.lib.messages import dispenser

API_TABLES = ['assets', 'balances', 'credits', 'debits', 'bets', 'bet_matches',
              'broadcasts', 'btcpays', 'burns', 'cancels', 'destructions', 'dividends',
              'issuances', 'orders', 'order_matches', 'sends', 'bet_expirations',
              'order_expirations', 'bet_match_expirations', 'order_match_expirations',
              'bet_match_resolutions', 'rps', 'rpsresolves', 'rps_matches',
              'rps_expirations', 'rps_match_expirations', 'mempool', 'sweeps',
              'dispensers', 'dispenses', 'transactions']

API_TRANSACTIONS = ['bet', 'broadcast', 'btcpay', 'burn', 'cancel', 'destroy',
                    'dividend', 'issuance', 'order', 'send', 'rps', 'rpsresolve',
                    'sweep', 'dispenser']

COMMONS_ARGS = ['encoding', 'fee_per_kb', 'regular_dust_size', 'multisig_dust_size',
                'op_return_value', 'pubkey', 'allow_unconfirmed_inputs', 'fee',
                'fee_provided', 'estimate_fee_per_kb', 'estimate_fee_per_kb_nblocks',
                'estimate_fee_per_kb_conf_target', 'estimate_fee_per_kb_mode',
                'unspent_tx_hash', 'custom_inputs', 'dust_return_pubkey',
                'disable_utxo_locks', 'extended_tx_info',
                'p2sh_source_multisig_pubkeys', 'p2sh_source_multisig_pubkeys_required',
                'p2sh_pretx_txid']

API_MAX_LOG_SIZE = 10 * 1024 * 1024  # max log size of 10 MB before rotation (make configurable later)
API_MAX_LOG_COUNT = 10

JSON_RPC_ERROR_API_COMPOSE = -32001  # code to use for error composing transaction result

current_api_status_code = None  # is updated by the APIStatusPoller
current_api_status_response_json = None  # is updated by the APIStatusPoller


class APIError(Exception):
    pass


class BackendError(Exception):
    pass


def check_backend_state():
    """Checks blocktime of last block to see if {} Core is running behind.""".format(config.BTC_NAME)
    block_count = backend.getblockcount()
    block_hash = backend.getblockhash(block_count)
    cblock = backend.getblock(block_hash)
    time_behind = time.time() - cblock.nTime   # TODO: Block times are not very reliable.
    if time_behind > 60 * 60 * 2:   # Two hours.
        raise BackendError('Bitcoind is running about {} hours behind.'.format(round(time_behind / 3600)))

    # check backend index
    blocks_behind = backend.getindexblocksbehind()
    if blocks_behind > 5:
        raise BackendError('Indexd is running {} blocks behind.'.format(blocks_behind))

    logger.debug('Backend state check passed.')


class DatabaseError(Exception):
    pass


def check_database_state(db, blockcount):
    """Checks {} database to see if it is caught up with backend.""".format(config.XCP_NAME)
    if util.CURRENT_BLOCK_INDEX + 1 < blockcount:
        raise DatabaseError('{} database is behind backend.'.format(config.XCP_NAME))
    logger.debug('Database state check passed.')
    return


# TODO: ALL queries EVERYWHERE should be done with these methods
def db_query(db, statement, bindings=(), callback=None, **callback_args):
    """Allow direct access to the database in a parametrized manner."""
    cursor = db.cursor()

    # Sanitize.
    forbidden_words = ['pragma', 'attach', 'database', 'begin', 'transaction']
    for word in forbidden_words:
        # Match the forbidden word only as a whole word; for example, "transactions"
        # is allowed because of the "s" at the end. (Both patterns must be raw
        # strings so that `\b` is a word boundary, not a backspace character.)
        if re.search(r"\b" + word + r"\b", statement.lower()):
            raise APIError("Forbidden word in query: '{}'.".format(word))

    if hasattr(callback, '__call__'):
        cursor.execute(statement, bindings)
        for row in cursor:
            callback(row, **callback_args)
        results = None
    else:
        results = list(cursor.execute(statement, bindings))
    cursor.close()
    return results


def get_rows(db, table, filters=None, filterop='AND', order_by=None, order_dir=None,
             start_block=None, end_block=None, status=None, limit=1000, offset=0,
             show_expired=True):
    """SELECT * FROM wrapper. Filters results based on a filter data structure (as used by the API)."""
    if filters == None:
        filters = []

    def value_to_marker(value):
        # if value is an array, the placeholder is (?,?,?,..)
        if isinstance(value, list):
            return '''({})'''.format(','.join(['?' for e in range(0, len(value))]))
        else:
            return '''?'''

    # TODO: Document that op can be anything that SQLite3 accepts.
    if not table or table.lower() not in API_TABLES:
        raise APIError('Unknown table')
    if filterop and filterop.upper() not in ['OR', 'AND']:
        raise APIError('Invalid filter operator (OR, AND)')
    if order_dir and order_dir.upper() not in ['ASC', 'DESC']:
        raise APIError('Invalid order direction (ASC, DESC)')
    if not isinstance(limit, int):
        raise APIError('Invalid limit')
    elif config.API_LIMIT_ROWS != 0 and limit > config.API_LIMIT_ROWS:
        raise APIError('Limit should be lower or equal to %i' % config.API_LIMIT_ROWS)
    elif config.API_LIMIT_ROWS != 0 and limit == 0:
        raise APIError('Limit should be greater than 0')
    if not isinstance(offset, int):
        raise APIError('Invalid offset')
    # TODO: accept an object: {'field1':'ASC', 'field2': 'DESC'}
    if order_by and not re.compile('^[a-z0-9_]+$').match(order_by):
        raise APIError('Invalid order_by, must be a field name')

    if isinstance(filters, dict):  # single filter entry, convert to a one entry list
        filters = [filters,]
    elif not isinstance(filters, list):
        filters = []

    # TODO: Document this! (Each filter can be an ordered list.)
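    # Illustrative examples (not part of the original source): each entry in
    # `filters` may be given either as a dict or as an ordered list/tuple, e.g.
    #
    #     {'field': 'asset', 'op': '==', 'value': 'XCP'}
    #     ['asset', '==', 'XCP']                 # field, op, value
    #     ['memo', 'LIKE', '%hello%', False]     # optional 4th item: case_sensitive
    #
    # The normalisation step below converts the list/tuple form into the dict form.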
new_filters = [] for filter_ in filters: if type(filter_) in (list, tuple) and len(filter_) in [3, 4]: new_filter = {'field': filter_[0], 'op': filter_[1], 'value': filter_[2]} if len(filter_) == 4: new_filter['case_sensitive'] = filter_[3] new_filters.append(new_filter) elif type(filter_) == dict: new_filters.append(filter_) else: raise APIError('Unknown filter type') filters = new_filters # validate filter(s) for filter_ in filters: for field in ['field', 'op', 'value']: #should have all fields if field not in filter_: raise APIError("A specified filter is missing the '%s' field" % field) if not isinstance(filter_['value'], (str, int, float, list)): raise APIError("Invalid value for the field '%s'" % filter_['field']) if isinstance(filter_['value'], list) and filter_['op'].upper() not in ['IN', 'NOT IN']: raise APIError("Invalid value for the field '%s'" % filter_['field']) if filter_['op'].upper() not in ['=', '==', '!=', '>', '<', '>=', '<=', 'IN', 'LIKE', 'NOT IN', 'NOT LIKE']: raise APIError("Invalid operator for the field '%s'" % filter_['field']) if 'case_sensitive' in filter_ and not isinstance(filter_['case_sensitive'], bool): raise APIError("case_sensitive must be a boolean") # special case for memo and memo_hex field searches if table == 'sends': adjust_get_sends_memo_filters(filters) # SELECT statement = '''SELECT * FROM {}'''.format(table) # WHERE bindings = [] conditions = [] for filter_ in filters: case_sensitive = False if 'case_sensitive' not in filter_ else filter_['case_sensitive'] if filter_['op'] == 'LIKE' and case_sensitive == False: filter_['field'] = '''UPPER({})'''.format(filter_['field']) filter_['value'] = filter_['value'].upper() marker = value_to_marker(filter_['value']) conditions.append('''{} {} {}'''.format(filter_['field'], filter_['op'], marker)) if isinstance(filter_['value'], list): bindings += filter_['value'] else: bindings.append(filter_['value']) # AND filters more_conditions = [] if table not in ['balances', 'order_matches', 'bet_matches']: if start_block != None: more_conditions.append('''block_index >= ?''') bindings.append(start_block) if end_block != None: more_conditions.append('''block_index <= ?''') bindings.append(end_block) elif table in ['order_matches', 'bet_matches']: if start_block != None: more_conditions.append('''tx0_block_index >= ?''') bindings.append(start_block) if end_block != None: more_conditions.append('''tx1_block_index <= ?''') bindings.append(end_block) # status if isinstance(status, list) and len(status) > 0: more_conditions.append('''status IN {}'''.format(value_to_marker(status))) bindings += status elif isinstance(status, str) and status != '': more_conditions.append('''status == ?''') bindings.append(status) # legacy filters if not show_expired and table == 'orders': #Ignore BTC orders one block early. expire_index = util.CURRENT_BLOCK_INDEX + 1 more_conditions.append('''((give_asset == ? AND expire_index > ?) 
OR give_asset != ?)''') bindings += [config.BTC, expire_index, config.BTC] if (len(conditions) + len(more_conditions)) > 0: statement += ''' WHERE''' all_conditions = [] if len(conditions) > 0: all_conditions.append('''({})'''.format(''' {} '''.format(filterop.upper()).join(conditions))) if len(more_conditions) > 0: all_conditions.append('''({})'''.format(''' AND '''.join(more_conditions))) statement += ''' {}'''.format(''' AND '''.join(all_conditions)) # ORDER BY if order_by != None: statement += ''' ORDER BY {}'''.format(order_by) if order_dir != None: statement += ''' {}'''.format(order_dir.upper()) # LIMIT if limit and limit > 0: statement += ''' LIMIT {}'''.format(limit) if offset: statement += ''' OFFSET {}'''.format(offset) query_result = db_query(db, statement, tuple(bindings)) if table == 'balances': return adjust_get_balances_results(query_result, db) if table == 'destructions': return adjust_get_destructions_results(query_result) if table == 'sends': # for sends, handle the memo field properly return adjust_get_sends_results(query_result) if table == 'transactions': # for transactions, handle the data field properly return adjust_get_transactions_results(query_result) return query_result def adjust_get_balances_results(query_result, db): filtered_results = [] assets = {} for balances_row in list(query_result): asset = balances_row['asset'] if not asset in assets: assets[asset] = util.is_divisible(db, asset) balances_row['divisible'] = assets[asset] filtered_results.append(balances_row) return filtered_results def adjust_get_destructions_results(query_result): filtered_results = [] for destruction_row in list(query_result): if type(destruction_row['tag']) == bytes: destruction_row['tag'] = destruction_row['tag'].decode('utf-8', 'ignore') filtered_results.append(destruction_row) return filtered_results def adjust_get_sends_memo_filters(filters): """Convert memo to a byte string. If memo_hex is supplied, attempt to decode it and use that instead.""" for filter_ in filters: if filter_['field'] == 'memo': filter_['value'] = bytes(filter_['value'], 'utf-8') if filter_['field'] == 'memo_hex': # search the indexed memo field with a byte string filter_['field'] = 'memo' try: filter_['value'] = bytes.fromhex(filter_['value']) except ValueError as e: raise APIError("Invalid memo_hex value") def adjust_get_sends_results(query_result): """Format the memo_hex field. Try and decode the memo from a utf-8 uncoded string. Invalid utf-8 strings return an empty memo.""" filtered_results = [] for send_row in list(query_result): try: if send_row['memo'] is None: send_row['memo_hex'] = None send_row['memo'] = None else: if type(send_row['memo']) == str: send_row['memo'] = bytes(send_row['memo'], 'utf-8') send_row['memo_hex'] = binascii.hexlify(send_row['memo']).decode('utf8') send_row['memo'] = send_row['memo'].decode('utf-8') except UnicodeDecodeError: send_row['memo'] = '' filtered_results.append(send_row) return filtered_results def adjust_get_transactions_results(query_result): """Format the data field. Try and decode the data from a utf-8 uncoded string. 
Invalid utf-8 strings return an empty data.""" filtered_results = [] for transaction_row in list(query_result): transaction_row['data'] = transaction_row['data'].hex() filtered_results.append(transaction_row) return filtered_results def compose_transaction(db, name, params, encoding='auto', fee_per_kb=None, estimate_fee_per_kb=None, estimate_fee_per_kb_conf_target=config.ESTIMATE_FEE_CONF_TARGET, estimate_fee_per_kb_mode=config.ESTIMATE_FEE_MODE, regular_dust_size=config.DEFAULT_REGULAR_DUST_SIZE, multisig_dust_size=config.DEFAULT_MULTISIG_DUST_SIZE, op_return_value=config.DEFAULT_OP_RETURN_VALUE, pubkey=None, allow_unconfirmed_inputs=False, fee=None, fee_provided=0, unspent_tx_hash=None, custom_inputs=None, dust_return_pubkey=None, disable_utxo_locks=False, extended_tx_info=False, p2sh_source_multisig_pubkeys=None, p2sh_source_multisig_pubkeys_required=None, p2sh_pretx_txid=None, old_style_api=True, segwit=False): """Create and return a transaction.""" # Get provided pubkeys. if type(pubkey) == str: provided_pubkeys = [pubkey] elif type(pubkey) == list: provided_pubkeys = pubkey elif pubkey == None: provided_pubkeys = [] else: assert False # Get additional pubkeys from `source` and `destination` params. # Convert `source` and `destination` to pubkeyhash form. for address_name in ['source', 'destination']: if address_name in params: address = params[address_name] if isinstance(address, list): #pkhshs = [] #for addr in address: # provided_pubkeys += script.extract_pubkeys(addr) # pkhshs.append(script.make_pubkeyhash(addr)) #params[address_name] = pkhshs pass else: provided_pubkeys += script.extract_pubkeys(address) params[address_name] = script.make_pubkeyhash(address) # Check validity of collected pubkeys. for pubkey in provided_pubkeys: if not script.is_fully_valid(binascii.unhexlify(pubkey)): raise script.AddressError('invalid public key: {}'.format(pubkey)) compose_method = sys.modules['counterpartylib.lib.messages.{}'.format(name)].compose compose_params = inspect.getargspec(compose_method)[0] missing_params = [p for p in compose_params if p not in params and p != 'db'] for param in missing_params: params[param] = None # dont override fee_per_kb if specified if fee_per_kb is not None: estimate_fee_per_kb = False else: fee_per_kb = config.DEFAULT_FEE_PER_KB if 'extended_tx_info' in params: extended_tx_info = params['extended_tx_info'] del params['extended_tx_info'] if 'old_style_api' in params: old_style_api = params['old_style_api'] del params['old_style_api'] if 'segwit' in params: segwit = params['segwit'] del params['segwit'] tx_info = compose_method(db, **params) return transaction.construct(db, tx_info, encoding=encoding, fee_per_kb=fee_per_kb, estimate_fee_per_kb=estimate_fee_per_kb, estimate_fee_per_kb_conf_target=estimate_fee_per_kb_conf_target, regular_dust_size=regular_dust_size, multisig_dust_size=multisig_dust_size, op_return_value=op_return_value, provided_pubkeys=provided_pubkeys, allow_unconfirmed_inputs=allow_unconfirmed_inputs, exact_fee=fee, fee_provided=fee_provided, unspent_tx_hash=unspent_tx_hash, custom_inputs=custom_inputs, dust_return_pubkey=dust_return_pubkey, disable_utxo_locks=disable_utxo_locks, extended_tx_info=extended_tx_info, p2sh_source_multisig_pubkeys=p2sh_source_multisig_pubkeys, p2sh_source_multisig_pubkeys_required=p2sh_source_multisig_pubkeys_required, p2sh_pretx_txid=p2sh_pretx_txid, old_style_api=old_style_api, segwit=segwit) def conditional_decorator(decorator, condition): """Checks the condition and if True applies specified decorator.""" def 
gen_decorator(f): if not condition: return f return decorator(f) return gen_decorator def init_api_access_log(app): """Initialize API logger.""" loggers = (logging.getLogger('werkzeug'), app.logger) # Disable console logging... for l in loggers: l.setLevel(logging.INFO) l.propagate = False # Log to file, if configured... if config.API_LOG: handler = logging_handlers.RotatingFileHandler(config.API_LOG, 'a', API_MAX_LOG_SIZE, API_MAX_LOG_COUNT) for l in loggers: l.addHandler(handler) class APIStatusPoller(threading.Thread): """Perform regular checks on the state of the backend and the database.""" def __init__(self): self.last_database_check = 0 threading.Thread.__init__(self) self.stop_event = threading.Event() def stop(self): self.stop_event.set() def run(self): logger.debug('Starting API Status Poller.') global current_api_status_code, current_api_status_response_json db = database.get_connection(read_only=True, integrity_check=False) while self.stop_event.is_set() != True: try: # Check that backend is running, communicable, and caught up with the blockchain. # Check that the database has caught up with bitcoind. if time.time() - self.last_database_check > 10 * 60: # Ten minutes since last check. if not config.FORCE: code = 11 logger.debug('Checking backend state.') check_backend_state() code = 12 logger.debug('Checking database state.') check_database_state(db, backend.getblockcount()) self.last_database_check = time.time() except (BackendError, DatabaseError) as e: exception_name = e.__class__.__name__ exception_text = str(e) logger.debug("API Status Poller: %s", exception_text) jsonrpc_response = jsonrpc.exceptions.JSONRPCServerError(message=exception_name, data=exception_text) current_api_status_code = code current_api_status_response_json = jsonrpc_response.json.encode() else: current_api_status_code = None current_api_status_response_json = None time.sleep(config.BACKEND_POLL_INTERVAL) class APIServer(threading.Thread): """Handle JSON-RPC API calls.""" def __init__(self, db=None): self.db = db self.is_ready = False threading.Thread.__init__(self) self.stop_event = threading.Event() def stop(self): self.join() self.stop_event.set() def run(self): logger.info('Starting API Server.') self.db = self.db or database.get_connection(read_only=True, integrity_check=False) app = flask.Flask(__name__) auth = HTTPBasicAuth() @auth.get_password def get_pw(username): if username == config.RPC_USER: return config.RPC_PASSWORD return None ###################### #READ API # Generate dynamically get_{table} methods def generate_get_method(table): def get_method(**kwargs): try: return get_rows(self.db, table=table, **kwargs) except TypeError as e: #TODO: generalise for all API methods raise APIError(str(e)) return get_method for table in API_TABLES: new_method = generate_get_method(table) new_method.__name__ = 'get_{}'.format(table) dispatcher.add_method(new_method) @dispatcher.add_method def sql(query, bindings=None): if bindings == None: bindings = [] return db_query(self.db, query, tuple(bindings)) ###################### #WRITE/ACTION API # Generate dynamically create_{transaction} methods def generate_create_method(tx): def split_params(**kwargs): transaction_args = {} common_args = {} private_key_wif = None for key in kwargs: if key in COMMONS_ARGS: common_args[key] = kwargs[key] elif key == 'privkey': private_key_wif = kwargs[key] else: transaction_args[key] = kwargs[key] return transaction_args, common_args, private_key_wif def create_method(**kwargs): try: transaction_args, common_args, 
private_key_wif = split_params(**kwargs) return compose_transaction(self.db, name=tx, params=transaction_args, **common_args) except (TypeError, script.AddressError, exceptions.ComposeError, exceptions.TransactionError, exceptions.BalanceError) as error: # TypeError happens when unexpected keyword arguments are passed in error_msg = "Error composing {} transaction via API: {}".format(tx, str(error)) logging.warning(error_msg) logging.warning(traceback.format_exc()) raise JSONRPCDispatchException(code=JSON_RPC_ERROR_API_COMPOSE, message=error_msg) return create_method for tx in API_TRANSACTIONS: create_method = generate_create_method(tx) create_method.__name__ = 'create_{}'.format(tx) dispatcher.add_method(create_method) @dispatcher.add_method def get_messages(block_index): if not isinstance(block_index, int): raise APIError("block_index must be an integer.") cursor = self.db.cursor() cursor.execute('select * from messages where block_index = ? order by message_index asc', (block_index,)) messages = cursor.fetchall() cursor.close() return messages @dispatcher.add_method def get_messages_by_index(message_indexes): """Get specific messages from the feed, based on the message_index. @param message_index: A single index, or a list of one or more message indexes to retrieve. """ if not isinstance(message_indexes, list): message_indexes = [message_indexes,] for idx in message_indexes: #make sure the data is clean if not isinstance(idx, int): raise APIError("All items in message_indexes are not integers") cursor = self.db.cursor() cursor.execute('SELECT * FROM messages WHERE message_index IN (%s) ORDER BY message_index ASC' % (','.join([str(x) for x in message_indexes]),)) messages = cursor.fetchall() cursor.close() return messages @dispatcher.add_method def get_supply(asset): if asset == 'BTC': return backend.get_btc_supply(normalize=False) elif asset == 'XCP': return util.xcp_supply(self.db) else: asset = util.resolve_subasset_longname(self.db, asset) return util.asset_supply(self.db, asset) @dispatcher.add_method def get_xcp_supply(): logger.warning("Deprecated method: `get_xcp_supply`") return util.xcp_supply(self.db) @dispatcher.add_method def get_asset_info(assets=None, asset=None): if asset is not None: assets = [asset] if not isinstance(assets, list): raise APIError("assets must be a list of asset names, even if it just contains one entry") assetsInfo = [] for asset in assets: asset = util.resolve_subasset_longname(self.db, asset) # BTC and XCP. if asset in [config.BTC, config.XCP]: if asset == config.BTC: supply = backend.get_btc_supply(normalize=False) else: supply = util.xcp_supply(self.db) assetsInfo.append({ 'asset': asset, 'asset_longname': None, 'owner': None, 'divisible': True, 'locked': False, 'supply': supply, 'description': '', 'issuer': None }) continue # User‐created asset. cursor = self.db.cursor() issuances = list(cursor.execute('''SELECT * FROM issuances WHERE (status = ? AND asset = ?) 
ORDER BY block_index ASC''', ('valid', asset))) cursor.close() if not issuances: continue #asset not found, most likely else: last_issuance = issuances[-1] locked = False for e in issuances: if e['locked']: locked = True assetsInfo.append({ 'asset': asset, 'asset_longname': last_issuance['asset_longname'], 'owner': last_issuance['issuer'], 'divisible': bool(last_issuance['divisible']), 'locked': locked, 'supply': util.asset_supply(self.db, asset), 'description': last_issuance['description'], 'issuer': last_issuance['issuer']}) return assetsInfo @dispatcher.add_method def get_block_info(block_index): assert isinstance(block_index, int) cursor = self.db.cursor() cursor.execute('''SELECT * FROM blocks WHERE block_index = ?''', (block_index,)) blocks = list(cursor) if len(blocks) == 1: block = blocks[0] elif len(blocks) == 0: raise exceptions.DatabaseError('No blocks found.') else: assert False cursor.close() return block @dispatcher.add_method def fee_per_kb(conf_target=config.ESTIMATE_FEE_CONF_TARGET, mode=config.ESTIMATE_FEE_MODE): return backend.fee_per_kb(conf_target, mode) @dispatcher.add_method def get_blocks(block_indexes, min_message_index=None): """fetches block info and messages for the specified block indexes @param min_message_index: Retrieve blocks from the message feed on or after this specific message index (useful since blocks may appear in the message feed more than once, if a reorg occurred). Note that if this parameter is not specified, the messages for the first block will be returned. """ if not isinstance(block_indexes, (list, tuple)): raise APIError("block_indexes must be a list of integers.") if len(block_indexes) >= 250: raise APIError("can only specify up to 250 indexes at a time.") block_indexes_str = ','.join([str(x) for x in block_indexes]) cursor = self.db.cursor() # The blocks table gets rolled back from undolog, so min_message_index doesn't matter for this query cursor.execute('SELECT * FROM blocks WHERE block_index IN (%s) ORDER BY block_index ASC' % (block_indexes_str,)) blocks = cursor.fetchall() cursor.execute('SELECT * FROM messages WHERE block_index IN (%s) ORDER BY message_index ASC' % (block_indexes_str,)) messages = collections.deque(cursor.fetchall()) # Discard any messages less than min_message_index if min_message_index: while len(messages) and messages[0]['message_index'] < min_message_index: messages.popleft() # Packages messages into their appropriate block in the data structure to be returned for block in blocks: block['_messages'] = [] while len(messages) and messages[0]['block_index'] == block['block_index']: block['_messages'].append(messages.popleft()) #NOTE: if len(messages), then we're only returning the messages for the first set of blocks before the reorg cursor.close() return blocks @dispatcher.add_method def get_running_info(): latestBlockIndex = backend.getblockcount() try: check_database_state(self.db, latestBlockIndex) except DatabaseError: caught_up = False else: caught_up = True try: cursor = self.db.cursor() blocks = list(cursor.execute('''SELECT * FROM blocks WHERE block_index = ?''', (util.CURRENT_BLOCK_INDEX, ))) assert len(blocks) == 1 last_block = blocks[0] cursor.close() except: last_block = None try: last_message = util.last_message(self.db) except: last_message = None try: indexd_blocks_behind = backend.getindexblocksbehind() except: indexd_blocks_behind = latestBlockIndex if latestBlockIndex > 0 else 999999 indexd_caught_up = indexd_blocks_behind <= 1 server_ready = caught_up and indexd_caught_up return { 'server_ready': 
server_ready, 'db_caught_up': caught_up, 'bitcoin_block_count': latestBlockIndex, 'last_block': last_block, 'indexd_caught_up': indexd_caught_up, 'indexd_blocks_behind': indexd_blocks_behind, 'last_message_index': last_message['message_index'] if last_message else -1, 'api_limit_rows': config.API_LIMIT_ROWS, 'running_testnet': config.TESTNET, 'running_regtest': config.REGTEST, 'running_testcoin': config.TESTCOIN, 'version_major': config.VERSION_MAJOR, 'version_minor': config.VERSION_MINOR, 'version_revision': config.VERSION_REVISION } @dispatcher.add_method def get_element_counts(): counts = {} cursor = self.db.cursor() for element in ['transactions', 'blocks', 'debits', 'credits', 'balances', 'sends', 'orders', 'order_matches', 'btcpays', 'issuances', 'broadcasts', 'bets', 'bet_matches', 'dividends', 'burns', 'cancels', 'order_expirations', 'bet_expirations', 'order_match_expirations', 'bet_match_expirations', 'messages', 'destructions']: cursor.execute("SELECT COUNT(*) AS count FROM %s" % element) count_list = cursor.fetchall() assert len(count_list) == 1 counts[element] = count_list[0]['count'] cursor.close() return counts @dispatcher.add_method def get_asset_names(longnames=False): cursor = self.db.cursor() if longnames: names = [] for row in cursor.execute("SELECT asset, asset_longname FROM issuances WHERE status = 'valid' GROUP BY asset ORDER BY asset ASC"): names.append({'asset': row['asset'], 'asset_longname': row['asset_longname']}) else: names = [row['asset'] for row in cursor.execute("SELECT DISTINCT asset FROM issuances WHERE status = 'valid' ORDER BY asset ASC")] cursor.close() return names @dispatcher.add_method def get_asset_longnames(): return get_asset_names(longnames=True) @dispatcher.add_method def get_holder_count(asset): asset = util.resolve_subasset_longname(self.db, asset) holders = util.holders(self.db, asset, True) addresses = [] for holder in holders: addresses.append(holder['address']) return {asset: len(set(addresses))} @dispatcher.add_method def get_holders(asset): asset = util.resolve_subasset_longname(self.db, asset) holders = util.holders(self.db, asset, True) return holders @dispatcher.add_method def search_raw_transactions(address, unconfirmed=True): return backend.search_raw_transactions(address, unconfirmed=unconfirmed) @dispatcher.add_method def get_unspent_txouts(address, unconfirmed=False, unspent_tx_hash=None, order_by=None): results = backend.get_unspent_txouts(address, unconfirmed=unconfirmed, unspent_tx_hash=unspent_tx_hash) if order_by is None: return results else: order_key = order_by reverse = False if order_key.startswith('-'): order_key = order_key[1:] reverse = True return sorted(results, key=lambda x: x[order_key], reverse=reverse) @dispatcher.add_method def getrawtransaction(tx_hash, verbose=False, skip_missing=False): return backend.getrawtransaction(tx_hash, verbose=verbose, skip_missing=skip_missing) @dispatcher.add_method def getrawtransaction_batch(txhash_list, verbose=False, skip_missing=False): return backend.getrawtransaction_batch(txhash_list, verbose=verbose, skip_missing=skip_missing) @dispatcher.add_method def get_tx_info(tx_hex, block_index=None): # block_index mandatory for transactions before block 335000 source, destination, btc_amount, fee, data, extra = blocks.get_tx_info(tx_hex, block_index=block_index) return source, destination, btc_amount, fee, util.hexlify(data) if data else '' @dispatcher.add_method def unpack(data_hex): data = binascii.unhexlify(data_hex) message_type_id, message = message_type.unpack(data) # TODO: 
Enabled only for `send`. if message_type_id == send.ID: unpack_method = send.unpack elif message_type_id == enhanced_send.ID: unpack_method = enhanced_send.unpack else: raise APIError('unsupported message type') unpacked = unpack_method(self.db, message, util.CURRENT_BLOCK_INDEX) return message_type_id, unpacked @dispatcher.add_method # TODO: Rename this method. def search_pubkey(pubkeyhash, provided_pubkeys=None): return backend.pubkeyhash_to_pubkey(pubkeyhash, provided_pubkeys=provided_pubkeys) @dispatcher.add_method def get_dispenser_info(tx_hash=None, tx_index=None): cursor = self.db.cursor() if tx_hash is None and tx_index is None: raise APIError("You must provided a tx hash or a tx index") if tx_hash is not None: cursor.execute('SELECT d.*, a.asset_longname FROM dispensers d LEFT JOIN assets a ON a.asset_name = d.asset WHERE tx_hash=:tx_hash', {"tx_hash":tx_hash}) else: cursor.execute('SELECT d.*, a.asset_longname FROM dispensers d LEFT JOIN assets a ON a.asset_name = d.asset WHERE tx_index=:tx_index', {"tx_index":tx_index}) dispensers = cursor.fetchall() if len(dispensers) == 1: dispenser = dispensers[0] oracle_price = "" satoshi_price = "" fiat_price = "" oracle_price_last_updated = "" oracle_fiat_label = "" if dispenser["oracle_address"] != None: fiat_price = util.satoshirate_to_fiat(dispenser["satoshirate"]) oracle_price, oracle_fee, oracle_fiat_label, oracle_price_last_updated = util.get_oracle_last_price(self.db, dispenser["oracle_address"], util.CURRENT_BLOCK_INDEX) if (oracle_price > 0): satoshi_price = math.ceil((fiat_price/oracle_price) * config.UNIT) else: raise APIError("Last oracle price is zero") return { "tx_index": dispenser["tx_index"], "tx_hash": dispenser["tx_hash"], "block_index": dispenser["block_index"], "source": dispenser["source"], "asset": dispenser["asset"], "give_quantity": dispenser["give_quantity"], "escrow_quantity": dispenser["escrow_quantity"], "mainchainrate": dispenser["satoshirate"], "fiat_price": fiat_price, "fiat_unit": oracle_fiat_label, "oracle_price": oracle_price, "satoshi_price": satoshi_price, "status": dispenser["status"], "give_remaining": dispenser["give_remaining"], "oracle_address": dispenser["oracle_address"], "oracle_price_last_updated": oracle_price_last_updated, "asset_longname": dispenser["asset_longname"] } return {} def _set_cors_headers(response): if not config.RPC_NO_ALLOW_CORS: response.headers['Access-Control-Allow-Origin'] = '*' response.headers['Access-Control-Allow-Methods'] = 'GET, POST, OPTIONS' response.headers['Access-Control-Allow-Headers'] = 'DNT,X-Mx-ReqToken,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Authorization' @app.route('/healthz', methods=['GET']) def handle_healthz(): msg, code = 'Healthy', 200 try: latestBlockIndex = backend.getblockcount() check_database_state(self.db, latestBlockIndex) except DatabaseError: msg, code = 'Unhealthy', 503 return flask.Response(msg, code, mimetype='text/plain') @app.route('/', defaults={'args_path': ''}, methods=['GET', 'POST', 'OPTIONS']) @app.route('/', methods=['GET', 'POST', 'OPTIONS']) # Only require authentication if RPC_PASSWORD is set. 
@conditional_decorator(auth.login_required, hasattr(config, 'RPC_PASSWORD')) def handle_root(args_path): """Handle all paths, decide where to forward the query.""" if args_path == '' or args_path.startswith('api/') or args_path.startswith('API/') or \ args_path.startswith('rpc/') or args_path.startswith('RPC/'): if flask.request.method == 'POST': # Need to get those here because it might not be available in this aux function. request_json = flask.request.get_data().decode('utf-8') response = handle_rpc_post(request_json) return response elif flask.request.method == 'OPTIONS': response = handle_rpc_options() return response else: error = 'Invalid method.' return flask.Response(error, 405, mimetype='application/json') elif args_path.startswith('rest/') or args_path.startswith('REST/'): if flask.request.method == 'GET' or flask.request.method == 'POST': # Pass the URL path without /REST/ part and Flask request object. rest_path = args_path.split('/', 1)[1] response = handle_rest(rest_path, flask.request) return response else: error = 'Invalid method.' return flask.Response(error, 405, mimetype='application/json') else: # Not found return flask.Response(None, 404, mimetype='application/json') ###################### # JSON-RPC API ###################### def handle_rpc_options(): response = flask.Response('', 204) _set_cors_headers(response) return response def handle_rpc_post(request_json): """Handle /API/ POST route. Call relevant get_rows/create_transaction wrapper.""" # Check for valid request format. try: request_data = json.loads(request_json) assert 'id' in request_data and request_data['jsonrpc'] == "2.0" and request_data['method'] # params may be omitted except: obj_error = jsonrpc.exceptions.JSONRPCInvalidRequest(data="Invalid JSON-RPC 2.0 request format") return flask.Response(obj_error.json.encode(), 400, mimetype='application/json') # Only arguments passed as a `dict` are supported. if request_data.get('params', None) and not isinstance(request_data['params'], dict): obj_error = jsonrpc.exceptions.JSONRPCInvalidRequest( data='Arguments must be passed as a JSON object (list of unnamed arguments not supported)') return flask.Response(obj_error.json.encode(), 400, mimetype='application/json') # Return an error if the API Status Poller checks fail. if not config.FORCE and current_api_status_code: return flask.Response(current_api_status_response_json, 503, mimetype='application/json') # Answer request normally. # NOTE: `UnboundLocalError: local variable 'output' referenced before assignment` means the method doesn’t return anything. jsonrpc_response = jsonrpc.JSONRPCResponseManager.handle(request_json, dispatcher) response = flask.Response(jsonrpc_response.json.encode(), 200, mimetype='application/json') _set_cors_headers(response) return response ###################### # HTTP REST API ###################### def handle_rest(path_args, flask_request): """Handle /REST/ route. Query the database using get_rows or create transaction using compose_transaction.""" url_action = flask_request.path.split('/')[-1] if url_action == 'compose': compose = True elif url_action == 'get': compose = False else: error = 'Invalid action "%s".' % url_action return flask.Response(error, 400, mimetype='application/json') # Get all arguments passed via URL. url_args = path_args.split('/') try: query_type = url_args.pop(0).lower() except IndexError: error = 'No query_type provided.' return flask.Response(error, 400, mimetype='application/json') # Check if message type or table name are valid. 
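            # Illustrative REST usage (not part of the original module; host, port and
            # addresses are placeholders):
            #
            #   GET /rest/sends/get?destination=<address>&op=AND      (Accept: application/json)
            #       -> equivalent to get_rows(table='sends',
            #                                 filters=[{'field': 'destination', 'op': '==', 'value': '<address>'}])
            #
            #   GET /rest/send/compose?source=<address>&destination=<address>&asset=XCP&quantity=100
            #       -> composes an unsigned send transaction via compose_transaction()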
if (compose and query_type not in API_TRANSACTIONS) or \ (not compose and query_type not in API_TABLES): error = 'No such query type in supported queries: "%s".' % query_type return flask.Response(error, 400, mimetype='application/json') # Parse the additional arguments. extra_args = flask_request.args.items() query_data = {} if compose: common_args = {} transaction_args = {} for (key, value) in extra_args: # Determine value type. try: value = int(value) except ValueError: try: value = float(value) except ValueError: pass # Split keys into common and transaction-specific arguments. Discard the privkey. if key in COMMONS_ARGS: common_args[key] = value elif key == 'privkey': pass else: transaction_args[key] = value # Must have some additional transaction arguments. if not len(transaction_args): error = 'No transaction arguments provided.' return flask.Response(error, 400, mimetype='application/json') # Compose the transaction. try: query_data = compose_transaction(self.db, name=query_type, params=transaction_args, **common_args) except (script.AddressError, exceptions.ComposeError, exceptions.TransactionError, exceptions.BalanceError) as error: error_msg = logging.warning("{} -- error composing {} transaction via API: {}".format( str(error.__class__.__name__), query_type, str(error))) return flask.Response(error_msg, 400, mimetype='application/json') else: # Need to de-generate extra_args to pass it through. query_args = dict([item for item in extra_args]) operator = query_args.pop('op', 'AND') # Put the data into specific dictionary format. data_filter = [{'field': key, 'op': '==', 'value': value} for (key, value) in query_args.items()] # Run the query. try: query_data = get_rows(self.db, table=query_type, filters=data_filter, filterop=operator) except APIError as error: return flask.Response(str(error), 400, mimetype='application/json') # See which encoding to choose from. file_format = flask_request.headers['Accept'] # JSON as default. if file_format == 'application/json' or file_format == '*/*': response_data = json.dumps(query_data) elif file_format == 'application/xml': # Add document root for XML. Note when xmltodict encounters a list, it produces separate tags for every item. # Hence we end up with multiple query_type roots. To combat this we put it in a separate item dict. response_data = serialize_to_xml({query_type: {'item': query_data}}) else: error = 'Invalid file format: "%s".' % file_format return flask.Response(error, 400, mimetype='application/json') response = flask.Response(response_data, 200, mimetype=file_format) return response # Init the HTTP Server. init_api_access_log(app) # Run app server (blocking) self.is_ready = True app.run(host=config.RPC_HOST, port=config.RPC_PORT, threaded=True) self.db.close() return # vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4 #### arc4.py import binascii from Crypto.Cipher import ARC4 def init_arc4(seed): if isinstance(seed, str): seed = binascii.unhexlify(seed) return ARC4.new(seed) #### blocks.py """ Initialise database. Sieve blockchain for Counterparty transactions, and add them to the database. 
""" import os import time import binascii import struct import decimal D = decimal.Decimal import logging logger = logging.getLogger(__name__) import collections import platform import apsw import csv import copy import http import bitcoin as bitcoinlib from bitcoin.core.script import CScriptInvalidError from counterpartylib.lib import config from counterpartylib.lib import exceptions from counterpartylib.lib import util from counterpartylib.lib import check from counterpartylib.lib import script from counterpartylib.lib import backend from counterpartylib.lib import log from counterpartylib.lib import database from counterpartylib.lib import message_type from counterpartylib.lib import arc4 from counterpartylib.lib.transaction_helper import p2sh_encoding from .messages import (send, order, btcpay, issuance, broadcast, bet, dividend, burn, cancel, rps, rpsresolve, destroy, sweep, dispenser) from .messages.versions import enhanced_send, mpma from .kickstart.blocks_parser import BlockchainParser, ChainstateParser from .kickstart.utils import ib2h from .exceptions import DecodeError, BTCOnlyError # Order matters for FOREIGN KEY constraints. TABLES = ['credits', 'debits', 'messages'] + \ ['bet_match_resolutions', 'order_match_expirations', 'order_matches', 'order_expirations', 'orders', 'bet_match_expirations', 'bet_matches', 'bet_expirations', 'bets', 'broadcasts', 'btcpays', 'burns', 'cancels', 'dividends', 'issuances', 'sends', 'rps_match_expirations', 'rps_expirations', 'rpsresolves', 'rps_matches', 'rps', 'destructions', 'assets', 'addresses', 'sweeps', 'dispensers', 'dispenses'] # Compose list of tables tracked by undolog UNDOLOG_TABLES = copy.copy(TABLES) UNDOLOG_TABLES.remove('messages') UNDOLOG_TABLES += ['balances'] CURR_DIR = os.path.dirname(os.path.realpath(__file__)) with open(CURR_DIR + '/../mainnet_burns.csv', 'r') as f: mainnet_burns_reader = csv.DictReader(f) MAINNET_BURNS = {} for line in mainnet_burns_reader: MAINNET_BURNS[line['tx_hash']] = line def parse_tx(db, tx): """Parse the transaction, return True for success.""" cursor = db.cursor() try: with db: # Only one source and one destination allowed for now. if len(tx['source'].split('-')) > 1: return if tx['destination']: if len(tx['destination'].split('-')) > 1: return # Burns. if tx['destination'] == config.UNSPENDABLE: burn.parse(db, tx, MAINNET_BURNS) return if len(tx['data']) > 1: try: message_type_id, message = message_type.unpack(tx['data'], tx['block_index']) except struct.error: # Deterministically raised. message_type_id = None message = None else: message_type_id = None message = None # Protocol change. 
rps_enabled = tx['block_index'] >= 308500 or config.TESTNET or config.REGTEST if message_type_id == send.ID: send.parse(db, tx, message) elif message_type_id == enhanced_send.ID and util.enabled('enhanced_sends', block_index=tx['block_index']): enhanced_send.parse(db, tx, message) elif message_type_id == mpma.ID and util.enabled('mpma_sends', block_index=tx['block_index']): mpma.parse(db, tx, message) elif message_type_id == order.ID: order.parse(db, tx, message) elif message_type_id == btcpay.ID: btcpay.parse(db, tx, message) elif message_type_id == issuance.ID: issuance.parse(db, tx, message, message_type_id) elif message_type_id == issuance.SUBASSET_ID and util.enabled('subassets', block_index=tx['block_index']): issuance.parse(db, tx, message, message_type_id) elif message_type_id == broadcast.ID: broadcast.parse(db, tx, message) elif message_type_id == bet.ID: bet.parse(db, tx, message) elif message_type_id == dividend.ID: dividend.parse(db, tx, message) elif message_type_id == cancel.ID: cancel.parse(db, tx, message) elif message_type_id == rps.ID and rps_enabled: rps.parse(db, tx, message) elif message_type_id == rpsresolve.ID and rps_enabled: rpsresolve.parse(db, tx, message) elif message_type_id == destroy.ID and util.enabled('destroy_reactivated', block_index=tx['block_index']): destroy.parse(db, tx, message) elif message_type_id == sweep.ID and util.enabled('sweep_send', block_index=tx['block_index']): sweep.parse(db, tx, message) elif message_type_id == dispenser.ID and util.enabled('dispensers', block_index=tx['block_index']): dispenser.parse(db, tx, message) elif message_type_id == dispenser.DISPENSE_ID and util.enabled('dispensers', block_index=tx['block_index']): dispenser.dispense(db, tx) else: cursor.execute('''UPDATE transactions \ SET supported=? \ WHERE tx_hash=?''', (False, tx['tx_hash'])) if tx['block_index'] != config.MEMPOOL_BLOCK_INDEX: logger.info('Unsupported transaction: hash {}; data {}'.format(tx['tx_hash'], tx['data'])) cursor.close() return False # NOTE: for debugging (check asset conservation after every `N` transactions). # if not tx['tx_index'] % N: # check.asset_conservation(db) return True except Exception as e: raise exceptions.ParseTransactionError("%s" % e) finally: cursor.close() def parse_block(db, block_index, block_time, previous_ledger_hash=None, ledger_hash=None, previous_txlist_hash=None, txlist_hash=None, previous_messages_hash=None): """Parse the block, return hash of new ledger, txlist and messages. The unused arguments `ledger_hash` and `txlist_hash` are for the test suite. """ undolog_cursor = db.cursor() #remove the row tracer and exec tracer on this cursor, so we don't utilize them with undolog operations... 
undolog_cursor.setexectrace(None) undolog_cursor.setrowtrace(None) util.BLOCK_LEDGER = [] database.BLOCK_MESSAGES = [] assert block_index == util.CURRENT_BLOCK_INDEX # Remove undolog records for any block older than we should be tracking undolog_oldest_block_index = block_index - config.UNDOLOG_MAX_PAST_BLOCKS first_undo_index = list(undolog_cursor.execute('''SELECT first_undo_index FROM undolog_block WHERE block_index == ?''', (undolog_oldest_block_index,))) if len(first_undo_index) == 1 and first_undo_index[0] is not None: undolog_cursor.execute('''DELETE FROM undolog WHERE undo_index < ?''', (first_undo_index[0][0],)) undolog_cursor.execute('''DELETE FROM undolog_block WHERE block_index < ?''', (undolog_oldest_block_index,)) # Set undolog barrier for this block if block_index != config.BLOCK_FIRST: undolog_cursor.execute('''INSERT OR REPLACE INTO undolog_block(block_index, first_undo_index) SELECT ?, seq+1 FROM SQLITE_SEQUENCE WHERE name='undolog' ''', (block_index,)) else: undolog_cursor.execute('''INSERT OR REPLACE INTO undolog_block(block_index, first_undo_index) VALUES(?,?)''', (block_index, 1,)) undolog_cursor.close() # Expire orders, bets and rps. order.expire(db, block_index) bet.expire(db, block_index, block_time) rps.expire(db, block_index) # Parse transactions, sorting them by type. cursor = db.cursor() cursor.execute('''SELECT * FROM transactions \ WHERE block_index=? ORDER BY tx_index''', (block_index,)) txlist = [] for tx in list(cursor): try: parse_tx(db, tx) txlist.append('{}{}{}{}{}{}'.format(tx['tx_hash'], tx['source'], tx['destination'], tx['btc_amount'], tx['fee'], binascii.hexlify(tx['data']).decode('UTF-8'))) except exceptions.ParseTransactionError as e: logger.warn('ParseTransactionError for tx %s: %s' % (tx['tx_hash'], e)) raise e #pass cursor.close() # Calculate consensus hashes. new_txlist_hash, found_txlist_hash = check.consensus_hash(db, 'txlist_hash', previous_txlist_hash, txlist) new_ledger_hash, found_ledger_hash = check.consensus_hash(db, 'ledger_hash', previous_ledger_hash, util.BLOCK_LEDGER) new_messages_hash, found_messages_hash = check.consensus_hash(db, 'messages_hash', previous_messages_hash, database.BLOCK_MESSAGES) return new_ledger_hash, new_txlist_hash, new_messages_hash, found_messages_hash def initialise(db): """Initialise data, create and populate the database.""" cursor = db.cursor() # Blocks cursor.execute('''CREATE TABLE IF NOT EXISTS blocks( block_index INTEGER UNIQUE, block_hash TEXT UNIQUE, block_time INTEGER, previous_block_hash TEXT UNIQUE, difficulty INTEGER, PRIMARY KEY (block_index, block_hash)) ''') cursor.execute('''CREATE INDEX IF NOT EXISTS block_index_idx ON blocks (block_index) ''') cursor.execute('''CREATE INDEX IF NOT EXISTS index_hash_idx ON blocks (block_index, block_hash) ''') # SQLite can’t do `ALTER TABLE IF COLUMN NOT EXISTS`. columns = [column['name'] for column in cursor.execute('''PRAGMA table_info(blocks)''')] if 'ledger_hash' not in columns: cursor.execute('''ALTER TABLE blocks ADD COLUMN ledger_hash TEXT''') if 'txlist_hash' not in columns: cursor.execute('''ALTER TABLE blocks ADD COLUMN txlist_hash TEXT''') if 'messages_hash' not in columns: cursor.execute('''ALTER TABLE blocks ADD COLUMN messages_hash TEXT''') if 'previous_block_hash' not in columns: cursor.execute('''ALTER TABLE blocks ADD COLUMN previous_block_hash TEXT''') if 'difficulty' not in columns: cursor.execute('''ALTER TABLE blocks ADD COLUMN difficulty TEXT''') # Check that first block in DB is BLOCK_FIRST. 
cursor.execute('''SELECT * from blocks ORDER BY block_index''') blocks = list(cursor) if len(blocks): if blocks[0]['block_index'] != config.BLOCK_FIRST: raise exceptions.DatabaseError('First block in database is not block {}.'.format(config.BLOCK_FIRST)) # Transactions cursor.execute('''CREATE TABLE IF NOT EXISTS transactions( tx_index INTEGER UNIQUE, tx_hash TEXT UNIQUE, block_index INTEGER, block_hash TEXT, block_time INTEGER, source TEXT, destination TEXT, btc_amount INTEGER, fee INTEGER, data BLOB, supported BOOL DEFAULT 1, FOREIGN KEY (block_index, block_hash) REFERENCES blocks(block_index, block_hash), PRIMARY KEY (tx_index, tx_hash, block_index)) ''') cursor.execute('''CREATE INDEX IF NOT EXISTS block_index_idx ON transactions (block_index) ''') cursor.execute('''CREATE INDEX IF NOT EXISTS tx_index_idx ON transactions (tx_index) ''') cursor.execute('''CREATE INDEX IF NOT EXISTS tx_hash_idx ON transactions (tx_hash) ''') cursor.execute('''CREATE INDEX IF NOT EXISTS index_index_idx ON transactions (block_index, tx_index) ''') cursor.execute('''CREATE INDEX IF NOT EXISTS index_hash_index_idx ON transactions (tx_index, tx_hash, block_index) ''') # Purge database of blocks, transactions from before BLOCK_FIRST. cursor.execute('''DELETE FROM blocks WHERE block_index < ?''', (config.BLOCK_FIRST,)) cursor.execute('''DELETE FROM transactions WHERE block_index < ?''', (config.BLOCK_FIRST,)) # (Valid) debits cursor.execute('''CREATE TABLE IF NOT EXISTS debits( block_index INTEGER, address TEXT, asset TEXT, quantity INTEGER, action TEXT, event TEXT, FOREIGN KEY (block_index) REFERENCES blocks(block_index)) ''') cursor.execute('''CREATE INDEX IF NOT EXISTS address_idx ON debits (address) ''') cursor.execute('''CREATE INDEX IF NOT EXISTS asset_idx ON debits (asset) ''') # (Valid) credits cursor.execute('''CREATE TABLE IF NOT EXISTS credits( block_index INTEGER, address TEXT, asset TEXT, quantity INTEGER, calling_function TEXT, event TEXT, FOREIGN KEY (block_index) REFERENCES blocks(block_index)) ''') cursor.execute('''CREATE INDEX IF NOT EXISTS address_idx ON credits (address) ''') cursor.execute('''CREATE INDEX IF NOT EXISTS asset_idx ON credits (asset) ''') # Balances cursor.execute('''CREATE TABLE IF NOT EXISTS balances( address TEXT, asset TEXT, quantity INTEGER) ''') cursor.execute('''CREATE INDEX IF NOT EXISTS address_asset_idx ON balances (address, asset) ''') cursor.execute('''CREATE INDEX IF NOT EXISTS address_idx ON balances (address) ''') cursor.execute('''CREATE INDEX IF NOT EXISTS asset_idx ON balances (asset) ''') # Assets # TODO: Store more asset info here?! cursor.execute('''CREATE TABLE IF NOT EXISTS assets( asset_id TEXT UNIQUE, asset_name TEXT UNIQUE, block_index INTEGER, asset_longname TEXT) ''') cursor.execute('''CREATE INDEX IF NOT EXISTS name_idx ON assets (asset_name) ''') cursor.execute('''CREATE INDEX IF NOT EXISTS id_idx ON assets (asset_id) ''') # Add asset_longname for sub-assets # SQLite can’t do `ALTER TABLE IF COLUMN NOT EXISTS`. 
columns = [column['name'] for column in cursor.execute('''PRAGMA table_info(assets)''')] if 'asset_longname' not in columns: cursor.execute('''ALTER TABLE assets ADD COLUMN asset_longname TEXT''') cursor.execute('''CREATE UNIQUE INDEX IF NOT EXISTS asset_longname_idx ON assets(asset_longname)''') cursor.execute('''SELECT * FROM assets WHERE asset_name = ?''', ('BTC',)) if not list(cursor): cursor.execute('''INSERT INTO assets VALUES (?,?,?,?)''', ('0', 'BTC', None, None)) cursor.execute('''INSERT INTO assets VALUES (?,?,?,?)''', ('1', 'XCP', None, None)) # Addresses # Leaving this here because in the future this could work for other things besides broadcast cursor.execute('''CREATE TABLE IF NOT EXISTS addresses( address TEXT UNIQUE, options INTEGER, block_index INTEGER) ''') cursor.execute('''CREATE INDEX IF NOT EXISTS addresses_idx ON addresses (address) ''') # Consolidated send.initialise(db) destroy.initialise(db) order.initialise(db) btcpay.initialise(db) issuance.initialise(db) broadcast.initialise(db) bet.initialise(db) dividend.initialise(db) burn.initialise(db) cancel.initialise(db) rps.initialise(db) rpsresolve.initialise(db) sweep.initialise(db) dispenser.initialise(db) # Messages cursor.execute('''CREATE TABLE IF NOT EXISTS messages( message_index INTEGER PRIMARY KEY, block_index INTEGER, command TEXT, category TEXT, bindings TEXT, timestamp INTEGER) ''') # TODO: FOREIGN KEY (block_index) REFERENCES blocks(block_index) DEFERRABLE INITIALLY DEFERRED) cursor.execute('''CREATE INDEX IF NOT EXISTS block_index_idx ON messages (block_index) ''') cursor.execute('''CREATE INDEX IF NOT EXISTS block_index_message_index_idx ON messages (block_index, message_index) ''') # Create undolog tables cursor.execute('''CREATE TABLE IF NOT EXISTS undolog( undo_index INTEGER PRIMARY KEY AUTOINCREMENT, sql TEXT) ''') cursor.execute('''CREATE TABLE IF NOT EXISTS undolog_block( block_index INTEGER PRIMARY KEY, first_undo_index INTEGER) ''') # Create undolog triggers for all tables in TABLES list, plus the 'balances' table for table in UNDOLOG_TABLES: columns = [column['name'] for column in cursor.execute('''PRAGMA table_info({})'''.format(table))] cursor.execute('''CREATE TRIGGER IF NOT EXISTS _{}_insert AFTER INSERT ON {} BEGIN INSERT INTO undolog VALUES(NULL, 'DELETE FROM {} WHERE rowid='||new.rowid); END; '''.format(table, table, table)) columns_parts = ["{}='||quote(old.{})||'".format(c, c) for c in columns] cursor.execute('''CREATE TRIGGER IF NOT EXISTS _{}_update AFTER UPDATE ON {} BEGIN INSERT INTO undolog VALUES(NULL, 'UPDATE {} SET {} WHERE rowid='||old.rowid); END; '''.format(table, table, table, ','.join(columns_parts))) columns_parts = ["'||quote(old.{})||'".format(c) for c in columns] cursor.execute('''CREATE TRIGGER IF NOT EXISTS _{}_delete BEFORE DELETE ON {} BEGIN INSERT INTO undolog VALUES(NULL, 'INSERT INTO {}(rowid,{}) VALUES('||old.rowid||',{})'); END; '''.format(table, table, table, ','.join(columns), ','.join(columns_parts))) # Drop undolog tables on messages table if they exist (fix for adding them in 9.52.0) for trigger_type in ('insert', 'update', 'delete'): cursor.execute("DROP TRIGGER IF EXISTS _messages_{}".format(trigger_type)) # Mempool messages # NOTE: `status`, 'block_index` are removed from bindings. 
cursor.execute('''DROP TABLE IF EXISTS mempool''') cursor.execute('''CREATE TABLE mempool( tx_hash TEXT, command TEXT, category TEXT, bindings TEXT, timestamp INTEGER) ''') cursor.close() def get_tx_info(tx_hex, block_parser=None, block_index=None, db=None): """Get the transaction info. Returns normalized None data for DecodeError and BTCOnlyError.""" try: return _get_tx_info(tx_hex, block_parser, block_index) except DecodeError as e: return b'', None, None, None, None, None except BTCOnlyError as e: # NOTE: For debugging, logger.debug('Could not decode: ' + str(e)) if util.enabled('dispensers', block_index): try: return b'', None, None, None, None, _get_swap_tx(e.decodedTx, block_parser, block_index, db=db) except: # (DecodeError, backend.indexd.BackendRPCError) as e: return b'', None, None, None, None, None else: return b'', None, None, None, None, None def _get_swap_tx(decoded_tx, block_parser=None, block_index=None, db=None): def get_pubkeyhash(scriptpubkey): asm = script.get_asm(scriptpubkey) if len(asm) > 0: if asm[0] == "OP_DUP": if len(asm) != 5 or asm[1] != 'OP_HASH160' or asm[3] != 'OP_EQUALVERIFY' or asm[4] != 'OP_CHECKSIG': return False else: return {"pubkeyhash":asm[2],"address_version":config.ADDRESSVERSION} elif (asm[0] == "OP_HASH160") and util.enabled('p2sh_dispensers_support'): if len(asm) != 3 or asm[-1] != 'OP_EQUAL': return False else: return {"pubkeyhash":asm[1],"address_version":config.P2SH_ADDRESSVERSION} return False def get_address(scriptpubkey): if util.enabled('correct_segwit_txids') and scriptpubkey.is_witness_v0_keyhash(): pubkey = scriptpubkey[2:] address = str(bitcoinlib.bech32.CBech32Data.from_bytes(0, pubkey)) return address else: pubkeyhashdict = get_pubkeyhash(scriptpubkey) if not pubkeyhashdict: return False pubkeyhash = pubkeyhashdict["pubkeyhash"] address_version = pubkeyhashdict["address_version"] pubkeyhash = binascii.hexlify(pubkeyhash).decode('utf-8') address = script.base58_check_encode(pubkeyhash, address_version) # Test decoding of address. if address != config.UNSPENDABLE and binascii.unhexlify(bytes(pubkeyhash, 'utf-8')) != script.base58_check_decode(address, address_version): return False return address outputs = [] check_sources = db == None # If we didn't get passed a database cursor, assume we have to check for dispenser for vout in decoded_tx.vout: address = get_address(vout.scriptPubKey) destination = None btc_amount = None if address: destination = address btc_amount = vout.nValue elif util.enabled('hotfix_dispensers_with_non_p2pkh'): asm = script.get_asm(vout.scriptPubKey) if asm[-1] == 'OP_CHECKSIG': destination, new_data = decode_checksig(asm, decoded_tx) elif asm[-1] == 'OP_CHECKMULTISIG': destination, new_data = decode_checkmultisig(asm, decoded_tx) elif asm[0] == 'OP_HASH160' and asm[-1] == 'OP_EQUAL' and len(asm) == 3: destination, new_data = decode_scripthash(asm) elif asm[0] == 'OP_RETURN': pass #Just ignore. elif util.enabled('segwit_support') and asm[0] == 0: # Segwit output destination, new_data = decode_p2w(vout.scriptPubKey) else: logger.error('unrecognised scriptPubkey. Just ignore this: ' + str(asm)) if destination and not new_data: amount = vout.nValue else: logger.error('cannot parse destination address or new_data found: ' + str(asm)) if db != None and dispenser.is_dispensable(db, destination, btc_amount): check_sources = True outputs.append((destination, btc_amount)) # Collect all (unique) source addresses. 
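# --- Illustrative aside (not part of the library) -----------------------------
# get_tx_info() above never raises for undecodable or BTC-only transactions; it
# returns a 6-tuple of neutral values (b'', None, None, None, None, None)
# instead.  A hypothetical caller can therefore inspect the fields directly
# rather than wrapping the call in try/except:
def looks_like_counterparty_tx(tx_hex, block_index=None, db=None):
    source, destination, btc_amount, fee, data, dispense = get_tx_info(
        tx_hex, block_index=block_index, db=db)
    return bool(data) or dispense is not None
# -------------------------------------------------------------------------------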
# if we haven't found them yet sources = [] if check_sources: for vin in decoded_tx.vin[:]: # Loop through inputs. if block_parser: vin_tx = block_parser.read_raw_transaction(ib2h(vin.prevout.hash)) vin_ctx = backend.deserialize(vin_tx['__data__']) else: vin_tx = backend.getrawtransaction(ib2h(vin.prevout.hash)) # TODO: Biggest penalty on parsing is here vin_ctx = backend.deserialize(vin_tx) vout = vin_ctx.vout[vin.prevout.n] asm = script.get_asm(vout.scriptPubKey) if asm[-1] == 'OP_CHECKSIG': new_source, new_data = decode_checksig(asm, decoded_tx) if new_data or not new_source: raise DecodeError('data in source') elif asm[-1] == 'OP_CHECKMULTISIG': new_source, new_data = decode_checkmultisig(asm, decoded_tx) if new_data or not new_source: raise DecodeError('data in source') elif asm[0] == 'OP_HASH160' and asm[-1] == 'OP_EQUAL' and len(asm) == 3: new_source, new_data = decode_scripthash(asm) if new_data or not new_source: raise DecodeError('data in source') elif util.enabled('segwit_support') and asm[0] == 0: # Segwit output # Get the full transaction data for this input transaction. new_source, new_data = decode_p2w(vout.scriptPubKey) else: raise DecodeError('unrecognised source type') # old; append to sources, results in invalid addresses # new; first found source is source, the rest can be anything (to fund the TX for example) if not (util.enabled('first_input_is_source') and len(sources)): # Collect unique sources. if new_source not in sources: sources.append(new_source) return (sources, outputs) def _get_tx_info(tx_hex, block_parser=None, block_index=None, p2sh_is_segwit=False): """Get the transaction info. Calls one of two subfunctions depending on signature type.""" if not block_index: block_index = util.CURRENT_BLOCK_INDEX if util.enabled('p2sh_addresses', block_index=block_index): # Protocol change. return get_tx_info3(tx_hex, block_parser=block_parser, p2sh_is_segwit=p2sh_is_segwit) elif util.enabled('multisig_addresses', block_index=block_index): # Protocol change. return get_tx_info2(tx_hex, block_parser=block_parser) else: return get_tx_info1(tx_hex, block_index, block_parser=block_parser) def get_tx_info1(tx_hex, block_index, block_parser=None): """Get singlesig transaction info. The destination, if it exists, always comes before the data output; the change, if it exists, always comes after. """ ctx = backend.deserialize(tx_hex) def get_pubkeyhash(scriptpubkey): asm = script.get_asm(scriptpubkey) if len(asm) != 5 or asm[0] != 'OP_DUP' or asm[1] != 'OP_HASH160' or asm[3] != 'OP_EQUALVERIFY' or asm[4] != 'OP_CHECKSIG': return False return asm[2] def get_address(scriptpubkey): pubkeyhash = get_pubkeyhash(scriptpubkey) if not pubkeyhash: return False pubkeyhash = binascii.hexlify(pubkeyhash).decode('utf-8') address = script.base58_check_encode(pubkeyhash, config.ADDRESSVERSION) # Test decoding of address. if address != config.UNSPENDABLE and binascii.unhexlify(bytes(pubkeyhash, 'utf-8')) != script.base58_check_decode(address, config.ADDRESSVERSION): return False return address # Fee is the input values minus output values. fee = 0 # Get destination output and data output. destination, btc_amount, data = None, None, b'' pubkeyhash_encoding = False for vout in ctx.vout: fee -= vout.nValue # Sum data chunks to get data. (Can mix OP_RETURN and multi-sig.) 
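# --- Illustrative aside (not part of the library) -----------------------------
# The source-collection rule used above (and again in get_tx_info2 further
# below): before the `first_input_is_source` protocol change every unique input
# address was collected; afterwards only the first recognised input determines
# the source and later inputs may be anything (e.g. just funding the fee).
# Reduced to plain Python over already-decoded input addresses:
def collect_sources(input_addresses, first_input_is_source):
    sources = []
    for address in input_addresses:
        if first_input_is_source and sources:
            break                      # new rule: only the first input counts
        if address not in sources:
            sources.append(address)    # old rule: all unique inputs
    return sources
# -------------------------------------------------------------------------------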
asm = script.get_asm(vout.scriptPubKey) if len(asm) == 2 and asm[0] == 'OP_RETURN': # OP_RETURN if type(asm[1]) != bytes: continue data_chunk = asm[1] data += data_chunk elif len(asm) == 5 and asm[0] == 1 and asm[3] == 2 and asm[4] == 'OP_CHECKMULTISIG': # Multi-sig if type(asm[2]) != bytes: continue data_pubkey = asm[2] data_chunk_length = data_pubkey[0] # No ord() necessary. data_chunk = data_pubkey[1:data_chunk_length + 1] data += data_chunk elif len(asm) == 5 and (block_index >= 293000 or config.TESTNET or config.REGTEST): # Protocol change. # Be strict. pubkeyhash = get_pubkeyhash(vout.scriptPubKey) if not pubkeyhash: continue if ctx.is_coinbase(): raise DecodeError('coinbase transaction') obj1 = arc4.init_arc4(ctx.vin[0].prevout.hash[::-1]) data_pubkey = obj1.decrypt(pubkeyhash) if data_pubkey[1:9] == config.PREFIX or pubkeyhash_encoding: pubkeyhash_encoding = True data_chunk_length = data_pubkey[0] # No ord() necessary. data_chunk = data_pubkey[1:data_chunk_length + 1] if data_chunk[-8:] == config.PREFIX: data += data_chunk[:-8] break else: data += data_chunk # Destination is the first output before the data. if not destination and not btc_amount and not data: address = get_address(vout.scriptPubKey) if address: destination = address btc_amount = vout.nValue # Check for, and strip away, prefix (except for burns). if destination == config.UNSPENDABLE: pass elif data[:len(config.PREFIX)] == config.PREFIX: data = data[len(config.PREFIX):] else: raise DecodeError('no prefix') # Only look for source if data were found or destination is UNSPENDABLE, for speed. if not data and destination != config.UNSPENDABLE: raise BTCOnlyError('no data and not unspendable') # Collect all possible source addresses; ignore coinbase transactions and anything but the simplest Pay‐to‐PubkeyHash inputs. source_list = [] for vin in ctx.vin[:]: # Loop through input transactions. if vin.prevout.is_null(): raise DecodeError('coinbase transaction') # Get the full transaction data for this input transaction. if block_parser: vin_tx = block_parser.read_raw_transaction(ib2h(vin.prevout.hash)) vin_ctx = backend.deserialize(vin_tx['__data__']) else: vin_tx = backend.getrawtransaction(ib2h(vin.prevout.hash)) vin_ctx = backend.deserialize(vin_tx) vout = vin_ctx.vout[vin.prevout.n] fee += vout.nValue address = get_address(vout.scriptPubKey) if not address: raise DecodeError('invalid scriptpubkey') else: source_list.append(address) # Require that all possible source addresses be the same. if all(x == source_list[0] for x in source_list): source = source_list[0] else: source = None return source, destination, btc_amount, fee, data, None def get_tx_info3(tx_hex, block_parser=None, p2sh_is_segwit=False): return get_tx_info2(tx_hex, block_parser=block_parser, p2sh_support=True, p2sh_is_segwit=p2sh_is_segwit) def arc4_decrypt(cyphertext, ctx): '''Un‐obfuscate. 
Initialise key once per attempt.''' key = arc4.init_arc4(ctx.vin[0].prevout.hash[::-1]) return key.decrypt(cyphertext) def get_opreturn(asm): if len(asm) == 2 and asm[0] == 'OP_RETURN': pubkeyhash = asm[1] if type(pubkeyhash) == bytes: return pubkeyhash raise DecodeError('invalid OP_RETURN') def decode_opreturn(asm, ctx): chunk = get_opreturn(asm) chunk = arc4_decrypt(chunk, ctx) if chunk[:len(config.PREFIX)] == config.PREFIX: # Data destination, data = None, chunk[len(config.PREFIX):] else: raise DecodeError('unrecognised OP_RETURN output') return destination, data def decode_checksig(asm, ctx): pubkeyhash = script.get_checksig(asm) chunk = arc4_decrypt(pubkeyhash, ctx) if chunk[1:len(config.PREFIX) + 1] == config.PREFIX: # Data # Padding byte in each output (instead of just in the last one) so that encoding methods may be mixed. Also, it’s just not very much data. chunk_length = chunk[0] chunk = chunk[1:chunk_length + 1] destination, data = None, chunk[len(config.PREFIX):] else: # Destination pubkeyhash = binascii.hexlify(pubkeyhash).decode('utf-8') destination, data = script.base58_check_encode(pubkeyhash, config.ADDRESSVERSION), None return destination, data def decode_scripthash(asm): destination = script.base58_check_encode(binascii.hexlify(asm[1]).decode('utf-8'), config.P2SH_ADDRESSVERSION) return destination, None def decode_checkmultisig(asm, ctx): pubkeys, signatures_required = script.get_checkmultisig(asm) chunk = b'' for pubkey in pubkeys[:-1]: # (No data in last pubkey.) chunk += pubkey[1:-1] # Skip sign byte and nonce byte. chunk = arc4_decrypt(chunk, ctx) if chunk[1:len(config.PREFIX) + 1] == config.PREFIX: # Data # Padding byte in each output (instead of just in the last one) so that encoding methods may be mixed. Also, it’s just not very much data. chunk_length = chunk[0] chunk = chunk[1:chunk_length + 1] destination, data = None, chunk[len(config.PREFIX):] else: # Destination pubkeyhashes = [script.pubkey_to_pubkeyhash(pubkey) for pubkey in pubkeys] destination, data = script.construct_array(signatures_required, pubkeyhashes, len(pubkeyhashes)), None return destination, data def decode_p2w(script_pubkey): try: bech32 = bitcoinlib.bech32.CBech32Data.from_bytes(0, script_pubkey[2:22]) return str(bech32), None except TypeError as e: raise DecodeError('bech32 decoding error') def get_tx_info2(tx_hex, block_parser=None, p2sh_support=False, p2sh_is_segwit=False): """Get multisig transaction info. The destinations, if they exists, always comes before the data output; the change, if it exists, always comes after. """ # Decode transaction binary. ctx = backend.deserialize(tx_hex) # Ignore coinbase transactions. if ctx.is_coinbase(): raise DecodeError('coinbase transaction') # Get destinations and data outputs. destinations, btc_amount, fee, data = [], 0, 0, b'' for vout in ctx.vout: # Fee is the input values minus output values. output_value = vout.nValue fee -= output_value # Ignore transactions with invalid script. 
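# --- Illustrative aside (not part of the library) -----------------------------
# How embedded data is recovered by the decode_* helpers above: the candidate
# bytes are ARC4-decrypted with a key derived from the (reversed) txid of the
# transaction's first input, and a valid chunk then looks like
# [length byte][PREFIX][payload].  Pure-Python ARC4 plus the chunk framing,
# assuming PREFIX is the usual 8-byte b'CNTRPRTY' marker:
def rc4(key, data):
    S, j = list(range(256)), 0
    for i in range(256):
        j = (j + S[i] + key[i % len(key)]) % 256
        S[i], S[j] = S[j], S[i]
    out, i, j = bytearray(), 0, 0
    for byte in data:
        i = (i + 1) % 256
        j = (j + S[i]) % 256
        S[i], S[j] = S[j], S[i]
        out.append(byte ^ S[(S[i] + S[j]) % 256])
    return bytes(out)

def unpack_chunk(decrypted, prefix=b'CNTRPRTY'):
    """Return the payload of one decrypted data chunk, or None if no prefix."""
    if decrypted[1:len(prefix) + 1] != prefix:
        return None                          # not a data output: treat as destination
    chunk_length = decrypted[0]              # first byte gives the usable length
    chunk = decrypted[1:chunk_length + 1]    # drop length byte and trailing padding
    return chunk[len(prefix):]               # drop prefix, keep the message payload
# -------------------------------------------------------------------------------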
try: asm = script.get_asm(vout.scriptPubKey) except CScriptInvalidError as e: raise DecodeError(e) if asm[0] == 'OP_RETURN': new_destination, new_data = decode_opreturn(asm, ctx) elif asm[-1] == 'OP_CHECKSIG': new_destination, new_data = decode_checksig(asm, ctx) elif asm[-1] == 'OP_CHECKMULTISIG': try: new_destination, new_data = decode_checkmultisig(asm, ctx) except: raise DecodeError('unrecognised output type') elif p2sh_support and asm[0] == 'OP_HASH160' and asm[-1] == 'OP_EQUAL' and len(asm) == 3: new_destination, new_data = decode_scripthash(asm) elif util.enabled('segwit_support') and asm[0] == 0: # Segwit Vout, second param is redeemScript #redeemScript = asm[1] new_destination, new_data = decode_p2w(vout.scriptPubKey) else: raise DecodeError('unrecognised output type') assert not (new_destination and new_data) assert new_destination != None or new_data != None # `decode_*()` should never return `None, None`. if util.enabled('null_data_check'): if new_data == []: raise DecodeError('new destination is `None`') # All destinations come before all data. if not data and not new_data and destinations != [config.UNSPENDABLE,]: destinations.append(new_destination) btc_amount += output_value else: if new_destination: # Change. break else: # Data. data += new_data # source can be determined by parsing the p2sh_data transaction # or from the first spent output sources = [] # P2SH encoding signalling p2sh_encoding_source = None if util.enabled('p2sh_encoding') and data == b'P2SH': data = b'' for vin in ctx.vin: if util.enabled("prevout_segwit_fix"): vin_tx = backend.getrawtransaction(ib2h(vin.prevout.hash)) vin_ctx = backend.deserialize(vin_tx) prevout_is_segwit = vin_ctx.has_witness() else: prevout_is_segwit = p2sh_is_segwit # Ignore transactions with invalid script. try: asm = script.get_asm(vin.scriptSig) except CScriptInvalidError as e: raise DecodeError(e) new_source, new_destination, new_data = p2sh_encoding.decode_p2sh_input(asm, p2sh_is_segwit=prevout_is_segwit) # this could be a p2sh source address with no encoded data if new_data is None: continue; if new_source is not None: if p2sh_encoding_source is not None and new_source != p2sh_encoding_source: # this p2sh data input has a bad source address raise DecodeError('inconsistent p2sh inputs') p2sh_encoding_source = new_source assert not new_destination data += new_data # Only look for source if data were found or destination is `UNSPENDABLE`, # for speed. if not data and destinations != [config.UNSPENDABLE,]: raise BTCOnlyError('no data and not unspendable', ctx) # Collect all (unique) source addresses. # if we haven't found them yet for vin in ctx.vin[:]: # Loop through inputs. # Get the full transaction data for this input transaction. 
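# --- Illustrative aside (not part of the library) -----------------------------
# P2SH encoding, as handled above: a literal b'P2SH' marker in the output data
# means the real payload is spread across the inputs' scriptSigs and must be
# concatenated, with all data-bearing inputs agreeing on a single source.  The
# shape of that loop, with `decode_input` standing in for
# p2sh_encoding.decode_p2sh_input:
def gather_p2sh_data(input_scriptsigs, decode_input):
    source, data = None, b''
    for scriptsig in input_scriptsigs:
        new_source, _new_destination, new_data = decode_input(scriptsig)
        if new_data is None:
            continue                               # plain P2SH input, no embedded data
        if new_source is not None:
            if source is not None and new_source != source:
                raise ValueError('inconsistent p2sh inputs')
            source = new_source
        data += new_data
    return source, data
# -------------------------------------------------------------------------------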
if block_parser: vin_tx = block_parser.read_raw_transaction(ib2h(vin.prevout.hash)) vin_ctx = backend.deserialize(vin_tx['__data__']) else: vin_tx = backend.getrawtransaction(ib2h(vin.prevout.hash)) vin_ctx = backend.deserialize(vin_tx) vout = vin_ctx.vout[vin.prevout.n] fee += vout.nValue asm = script.get_asm(vout.scriptPubKey) if asm[-1] == 'OP_CHECKSIG': new_source, new_data = decode_checksig(asm, ctx) if new_data or not new_source: raise DecodeError('data in source') elif asm[-1] == 'OP_CHECKMULTISIG': new_source, new_data = decode_checkmultisig(asm, ctx) if new_data or not new_source: raise DecodeError('data in source') elif p2sh_support and asm[0] == 'OP_HASH160' and asm[-1] == 'OP_EQUAL' and len(asm) == 3: new_source, new_data = decode_scripthash(asm) if new_data or not new_source: raise DecodeError('data in source') elif util.enabled('segwit_support') and asm[0] == 0: # Segwit output new_source, new_data = decode_p2w(vout.scriptPubKey) else: raise DecodeError('unrecognised source type') # old; append to sources, results in invalid addresses # new; first found source is source, the rest can be anything (to fund the TX for example) if not (util.enabled('first_input_is_source') and len(sources)): # Collect unique sources. if new_source not in sources: sources.append(new_source) # use the source from the p2sh data source if p2sh_encoding_source is not None: sources = p2sh_encoding_source else: sources = '-'.join(sources) destinations = '-'.join(destinations) return sources, destinations, btc_amount, round(fee), data, None def reinitialise(db, block_index=None): """Drop all predefined tables and initialise the database once again.""" cursor = db.cursor() # Delete all of the results of parsing (including the undolog) for table in TABLES + ['balances', 'undolog', 'undolog_block']: cursor.execute('''DROP TABLE IF EXISTS {}'''.format(table)) # Create missing tables initialise(db) # clean consensus hashes if first block hash doesn't match with checkpoint. if config.TESTNET: checkpoints = check.CHECKPOINTS_TESTNET elif config.REGTEST: checkpoints = check.CHECKPOINTS_REGTEST else: checkpoints = check.CHECKPOINTS_MAINNET columns = [column['name'] for column in cursor.execute('''PRAGMA table_info(blocks)''')] for field in ['ledger_hash', 'txlist_hash']: if field in columns: sql = '''SELECT {} FROM blocks WHERE block_index = ?'''.format(field) first_block = list(cursor.execute(sql, (config.BLOCK_FIRST,))) if first_block: first_hash = first_block[0][field] if first_hash != checkpoints[config.BLOCK_FIRST][field]: logger.info('First hash changed. Cleaning {}.'.format(field)) cursor.execute('''UPDATE blocks SET {} = NULL'''.format(field)) # For rollbacks, just delete new blocks and then reparse what’s left. if block_index: cursor.execute('''DELETE FROM transactions WHERE block_index > ?''', (block_index,)) cursor.execute('''DELETE FROM blocks WHERE block_index > ?''', (block_index,)) elif config.TESTNET or config.REGTEST: # block_index NOT specified and we are running testnet # just blow away the consensus hashes with a full testnet reparse, as we could activate # new features retroactively, which could otherwise lead to ConsensusError exceptions being raised. logger.info("Testnet/regtest full reparse detected: Clearing all consensus hashes before performing reparse.") cursor.execute('''UPDATE blocks SET ledger_hash = NULL, txlist_hash = NULL, messages_hash = NULL''') cursor.close() def reparse(db, block_index=None, quiet=False): """Reparse all transactions (atomically). 
If block_index is set, rollback to the end of that block. """ def reparse_from_undolog(db, block_index, quiet): """speedy reparse method that utilizes the undolog. if fails, fallback to the full reparse method""" if not block_index: return False # Can't reparse from undolog undolog_cursor = db.cursor() undolog_cursor.setexectrace(None) undolog_cursor.setrowtrace(None) def get_block_index_for_undo_index(undo_indexes, undo_index): for block_index, first_undo_index in undo_indexes.items(): #in order if undo_index < first_undo_index: return block_index - 1 else: return next(reversed(undo_indexes)) #the last inserted block_index with db: # Check if we can reparse from the undolog results = list(undolog_cursor.execute( '''SELECT block_index, first_undo_index FROM undolog_block WHERE block_index >= ? ORDER BY block_index ASC''', (block_index,))) undo_indexes = collections.OrderedDict() for result in results: undo_indexes[result[0]] = result[1] undo_start_block_index = block_index + 1 if undo_start_block_index not in undo_indexes: if block_index in undo_indexes: # Edge case, should only happen if we're "rolling back" to latest block (e.g. via cmd line) return True #skip undo else: return False # Undolog doesn't go that far back, full reparse required... # Grab the undolog... undolog = list(undolog_cursor.execute( '''SELECT undo_index, sql FROM undolog WHERE undo_index >= ? ORDER BY undo_index DESC''', (undo_indexes[undo_start_block_index],))) # Replay the undolog backwards, from the last entry to first_undo_index... for entry in undolog: logger.info("Undolog: Block {} (undo_index {}): {}".format( get_block_index_for_undo_index(undo_indexes, entry[0]), entry[0], entry[1])) undolog_cursor.execute(entry[1]) # Trim back tx and blocks undolog_cursor.execute('''DELETE FROM transactions WHERE block_index > ?''', (block_index,)) undolog_cursor.execute('''DELETE FROM blocks WHERE block_index > ?''', (block_index,)) # As well as undolog entries... undolog_cursor.execute('''DELETE FROM undolog WHERE undo_index >= ?''', (undo_indexes[undo_start_block_index],)) undolog_cursor.execute('''DELETE FROM undolog_block WHERE block_index >= ?''', (undo_start_block_index,)) undolog_cursor.close() return True if block_index: logger.info('Rolling back transactions to block {}.'.format(block_index)) else: logger.info('Reparsing all transactions.') check.software_version() reparse_start = time.time() # Reparse from the undolog if possible reparsed = reparse_from_undolog(db, block_index, quiet) cursor = db.cursor() if not reparsed: if block_index: logger.info("Could not roll back from undolog. Performing full reparse instead...") if quiet: root_logger = logging.getLogger() root_level = logger.getEffectiveLevel() with db: reinitialise(db, block_index) # Reparse all blocks, transactions. 
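# --- Illustrative aside (not part of the library) -----------------------------
# Rolling back to block N via the undolog, as implemented above, means: look up
# the first undo_index recorded for block N+1, execute every stored inverse
# statement from the newest entry down to that index, then trim the blocks,
# transactions and undolog tables.  The core replay step in isolation:
def replay_undolog(cursor, first_undo_index):
    entries = list(cursor.execute(
        '''SELECT undo_index, sql FROM undolog
           WHERE undo_index >= ? ORDER BY undo_index DESC''', (first_undo_index,)))
    for _undo_index, inverse_sql in entries:
        cursor.execute(inverse_sql)   # e.g. "DELETE FROM balances WHERE rowid=42"
# -------------------------------------------------------------------------------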
if quiet: root_logger.setLevel(logging.WARNING) previous_ledger_hash, previous_txlist_hash, previous_messages_hash = None, None, None cursor.execute('''SELECT * FROM blocks ORDER BY block_index''') for block in cursor.fetchall(): util.CURRENT_BLOCK_INDEX = block['block_index'] previous_ledger_hash, previous_txlist_hash, previous_messages_hash, previous_found_messages_hash = parse_block( db, block['block_index'], block['block_time'], previous_ledger_hash=previous_ledger_hash, previous_txlist_hash=previous_txlist_hash, previous_messages_hash=previous_messages_hash) if quiet and block['block_index'] % 10 == 0: # every 10 blocks print status root_logger.setLevel(logging.INFO) logger.info('Block (re-parse): %s (hashes: L:%s / TX:%s / M:%s%s)' % ( block['block_index'], previous_ledger_hash[-5:], previous_txlist_hash[-5:], previous_messages_hash[-5:], (' [overwrote %s]' % previous_found_messages_hash) if previous_found_messages_hash and previous_found_messages_hash != previous_messages_hash else '')) if quiet and block['block_index'] % 10 == 0: root_logger.setLevel(logging.WARNING) if quiet: root_logger.setLevel(root_level) with db: # Check for conservation of assets. check.asset_conservation(db) # Update database version number. database.update_version(db) cursor.close() reparse_end = time.time() logger.info("Reparse took {:.3f} minutes.".format((reparse_end - reparse_start) / 60.0)) # on full reparse - vacuum the DB afterwards for better subsequent performance (especially on non-SSDs) if not block_index: database.vacuum(db) def list_tx(db, block_hash, block_index, block_time, tx_hash, tx_index, tx_hex=None): assert type(tx_hash) == str cursor = db.cursor() # Edge case: confirmed tx_hash also in mempool cursor.execute('''SELECT * FROM transactions WHERE tx_hash = ?''', (tx_hash,)) transactions = list(cursor) if transactions: return tx_index # Get the important details about each transaction. 
if tx_hex is None: tx_hex = backend.getrawtransaction(tx_hash) # TODO: This is the call that is stalling the process the most source, destination, btc_amount, fee, data, decoded_tx = get_tx_info(tx_hex, db=db) if not source and decoded_tx and util.enabled('dispensers', block_index): outputs = decoded_tx[1] for out in outputs: if out[0] != decoded_tx[0][0] and dispenser.is_dispensable(db, out[0], out[1]): source = decoded_tx[0][0] destination = out[0] btc_amount = out[1] fee = 0 data = struct.pack(config.SHORT_TXTYPE_FORMAT, dispenser.DISPENSE_ID) data += b'\x00' break # Prevent inspection of further dispenses (only first one is valid) # For mempool if block_hash == None: block_hash = config.MEMPOOL_BLOCK_HASH block_index = config.MEMPOOL_BLOCK_INDEX else: assert block_index == util.CURRENT_BLOCK_INDEX if source and (data or destination == config.UNSPENDABLE or decoded_tx): logger.debug('Saving transaction: {}'.format(tx_hash)) cursor.execute('''INSERT INTO transactions( tx_index, tx_hash, block_index, block_hash, block_time, source, destination, btc_amount, fee, data) VALUES(?,?,?,?,?,?,?,?,?,?)''', (tx_index, tx_hash, block_index, block_hash, block_time, source, destination, btc_amount, fee, data) ) cursor.close() return tx_index + 1 else: logger.getChild('list_tx.skip').debug('Skipping transaction: {}'.format(tx_hash)) return tx_index def kickstart(db, bitcoind_dir): if bitcoind_dir is None: if platform.system() == 'Darwin': bitcoind_dir = os.path.expanduser('~/Library/Application Support/Bitcoin/') elif platform.system() == 'Windows': bitcoind_dir = os.path.join(os.environ['APPDATA'], 'Bitcoin') else: bitcoind_dir = os.path.expanduser('~/.bitcoin') if not os.path.isdir(bitcoind_dir): raise Exception('Bitcoin Core data directory not found at {}. Use --bitcoind-dir parameter.'.format(bitcoind_dir)) cursor = db.cursor() logger.warning('''Warning: - Ensure that bitcoind is stopped. - You must reindex bitcoind after the initialization is complete (restart with `-reindex=1`) - The initialization may take a while.''') if input('Proceed with the initialization? (y/N) : ') != 'y': return if config.TESTNET: first_hash = config.BLOCK_FIRST_TESTNET_HASH elif config.REGTEST: first_hash = config.BLOCK_FIRST_REGTEST_HASH else: first_hash = config.BLOCK_FIRST_MAINNET_HASH start_time_total = time.time() # Get hash of last known block. chain_parser = ChainstateParser(os.path.join(bitcoind_dir, 'chainstate')) last_hash = chain_parser.get_last_block_hash() chain_parser.close() # Start block parser. block_parser = BlockchainParser(os.path.join(bitcoind_dir, 'blocks'), os.path.join(bitcoind_dir, 'blocks/index')) current_hash = last_hash tx_index = 0 with db: # Prepare SQLite database. # TODO: Be more specific! logger.info('Preparing database.') start_time = time.time() reinitialise(db, block_index=config.BLOCK_FIRST - 1) logger.info('Prepared database in {:.3f}s'.format(time.time() - start_time)) # Get blocks and transactions, moving backwards in time. while current_hash != None: start_time = time.time() transactions = [] # Get `tx_info`s for transactions in this block. 
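# --- Illustrative aside (not part of the library) -----------------------------
# When a plain BTC send hits a dispenser address, list_tx() above synthesises a
# minimal data payload: one byte for the message type id (SHORT_TXTYPE_FORMAT is
# 'B') followed by a zero byte.  For example, if DISPENSE_ID happened to be 13:
import struct

def synth_dispense_data(dispense_id):
    return struct.pack('B', dispense_id) + b'\x00'

# synth_dispense_data(13) == b'\x0d\x00'
# -------------------------------------------------------------------------------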
block = block_parser.read_raw_block(current_hash) for tx in block['transactions']: source, destination, btc_amount, fee, data = get_tx_info(tx['__data__'], block_parser=block_parser, block_index=block['block_index']) if source and (data or destination == config.UNSPENDABLE): transactions.append(( tx['tx_hash'], block['block_index'], block['block_hash'], block['block_time'], source, destination, btc_amount, fee, data )) logger.info('Valid transaction: {}'.format(tx['tx_hash'])) # Insert block and transactions into database. cursor.execute('''INSERT INTO blocks( block_index, block_hash, block_time) VALUES(?,?,?)''', (block['block_index'], block['block_hash'], block['block_time'])) if len(transactions): transactions = list(reversed(transactions)) tx_chunks = [transactions[i:i+90] for i in range(0, len(transactions), 90)] for tx_chunk in tx_chunks: sql = '''INSERT INTO transactions (tx_index, tx_hash, block_index, block_hash, block_time, source, destination, btc_amount, fee, data) VALUES ''' bindings = () bindings_place = [] # negative tx_index from -1 and inverse order for fast reordering # TODO: Can this be clearer? for tx in tx_chunk: bindings += (-(tx_index + 1),) + tx bindings_place.append('''(?,?,?,?,?,?,?,?,?,?)''') tx_index += 1 sql += ', '.join(bindings_place) cursor.execute(sql, bindings) logger.info('Block {} ({}): {}/{} saved in {:.3f}s'.format( block['block_index'], block['block_hash'], len(transactions), len(block['transactions']), time.time() - start_time)) # Get hash of next block. current_hash = block['hash_prev'] if current_hash != first_hash else None block_parser.close() # Reorder all transactions in database. logger.info('Reordering transactions.') start_time = time.time() cursor.execute('''UPDATE transactions SET tx_index = tx_index + ?''', (tx_index,)) logger.info('Reordered transactions in {:.3f}s.'.format(time.time() - start_time)) # Parse all transactions in database. reparse(db) cursor.close() logger.info('Total duration: {:.3f}s'.format(time.time() - start_time_total)) def last_db_index(db): cursor = db.cursor() try: blocks = list(cursor.execute('''SELECT * FROM blocks WHERE block_index = (SELECT MAX(block_index) from blocks)''')) try: return blocks[0]['block_index'] except IndexError: return 0 except apsw.SQLError: return 0 def get_next_tx_index(db): """Return index of next transaction.""" cursor = db.cursor() txes = list(cursor.execute('''SELECT * FROM transactions WHERE tx_index = (SELECT MAX(tx_index) from transactions)''')) if txes: assert len(txes) == 1 tx_index = txes[0]['tx_index'] + 1 else: tx_index = 0 cursor.close() return tx_index class MempoolError(Exception): pass def follow(db): # Check software version. check.software_version() # Initialise. initialise(db) # Get index of last block. if util.CURRENT_BLOCK_INDEX == 0: logger.warning('New database.') block_index = config.BLOCK_FIRST else: block_index = util.CURRENT_BLOCK_INDEX + 1 # Check database version. try: check.database_version(db) except check.DatabaseVersionError as e: logger.info(str(e)) # no need to reparse or rollback a new database if block_index != config.BLOCK_FIRST: reparse(db, block_index=e.reparse_block_index, quiet=False) else: #version update was included in reparse(), so don't do it twice database.update_version(db) logger.info('Resuming parsing.') # Get index of last transaction. tx_index = get_next_tx_index(db) not_supported = {} # No false positives. 
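# --- Illustrative aside (not part of the library) -----------------------------
# kickstart() above walks the chain newest-first, so transactions are inserted
# with tx_index = -1, -2, -3, ... and a single
# "UPDATE transactions SET tx_index = tx_index + total" afterwards turns them
# into the correct ascending order.  The arithmetic in miniature:
def reorder_indexes(negative_indexes):
    total = len(negative_indexes)
    return [index + total for index in negative_indexes]

# reorder_indexes([-1, -2, -3]) == [2, 1, 0]   # the newest tx gets the highest index
# -------------------------------------------------------------------------------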
Use a dict to allow for O(1) lookups not_supported_sorted = collections.deque() # ^ Entries in form of (block_index, tx_hash), oldest first. Allows for easy removal of past, unncessary entries cursor = db.cursor() # a reorg can happen without the block count increasing, or even for that # matter, with the block count decreasing. This should only delay # processing of the new blocks a bit. while True: start_time = time.time() # Get block count. # If the backend is unreachable and `config.FORCE` is set, just sleep # and try again repeatedly. try: block_count = backend.getblockcount() except (ConnectionRefusedError, http.client.CannotSendRequest, backend.addrindexrs.BackendRPCError) as e: if config.FORCE: time.sleep(config.BACKEND_POLL_INTERVAL) continue else: raise e # Get new blocks. if block_index <= block_count: current_index = block_index # Backwards check for incorrect blocks due to chain reorganisation, and stop when a common parent is found. if block_count - block_index < 100: # Undolog only saves last 100 blocks, if there's a reorg deeper than that manual reparse should be done requires_rollback = False while True: if current_index == config.BLOCK_FIRST: break logger.debug('Checking that block {} is not an orphan.'.format(current_index)) # Backend parent hash. current_hash = backend.getblockhash(current_index) current_cblock = backend.getblock(current_hash) backend_parent = bitcoinlib.core.b2lx(current_cblock.hashPrevBlock) # DB parent hash. blocks = list(cursor.execute('''SELECT * FROM blocks WHERE block_index = ?''', (current_index - 1,))) if len(blocks) != 1: # For empty DB. break db_parent = blocks[0]['block_hash'] # Compare. assert type(db_parent) == str assert type(backend_parent) == str if db_parent == backend_parent: break else: current_index -= 1 requires_rollback = True # Rollback for reorganisation. if requires_rollback: # Record reorganisation. logger.warning('Blockchain reorganisation at block {}.'.format(current_index)) log.message(db, block_index, 'reorg', None, {'block_index': current_index}) # Rollback the DB. reparse(db, block_index=current_index-1, quiet=True) block_index = current_index tx_index = get_next_tx_index(db) continue # Check version. (Don’t add any blocks to the database while # running an out‐of‐date client!) check.software_version() # Get and parse transactions in this block (atomically). block_hash = backend.getblockhash(current_index) block = backend.getblock(block_hash) previous_block_hash = bitcoinlib.core.b2lx(block.hashPrevBlock) block_time = block.nTime txhash_list, raw_transactions = backend.get_tx_list(block) with db: util.CURRENT_BLOCK_INDEX = block_index # List the block. cursor.execute('''INSERT INTO blocks( block_index, block_hash, block_time, previous_block_hash, difficulty) VALUES(?,?,?,?,?)''', (block_index, block_hash, block_time, previous_block_hash, block.difficulty) ) # List the transactions in the block. for tx_hash in txhash_list: tx_hex = raw_transactions[tx_hash] tx_index = list_tx(db, block_hash, block_index, block_time, tx_hash, tx_index, tx_hex) # Parse the transactions in the block. new_ledger_hash, new_txlist_hash, new_messages_hash, found_messages_hash = parse_block(db, block_index, block_time) # When newly caught up, check for conservation of assets. if block_index == block_count: if config.CHECK_ASSET_CONSERVATION: check.asset_conservation(db) # Remove any non‐supported transactions older than ten blocks. 
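# --- Illustrative aside (not part of the library) -----------------------------
# Reorg detection above walks backwards from the tip, comparing the backend's
# previous-block hash at each height against the block hash stored in our own
# blocks table, and stops at the first height where they agree (the common
# parent).  In outline, with backend_parent_hash / db_block_hash as hypothetical
# lookup callables:
def find_common_parent(tip_index, first_block, backend_parent_hash, db_block_hash):
    current_index = tip_index
    while current_index > first_block:
        if backend_parent_hash(current_index) == db_block_hash(current_index - 1):
            return current_index          # chains agree from here down
        current_index -= 1                # orphaned block: keep walking back
    return first_block
# -------------------------------------------------------------------------------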
while len(not_supported_sorted) and not_supported_sorted[0][0] <= block_index - 10: tx_h = not_supported_sorted.popleft()[1] del not_supported[tx_h] logger.info('Block: %s (%ss, hashes: L:%s / TX:%s / M:%s%s)' % ( str(block_index), "{:.2f}".format(time.time() - start_time, 3), new_ledger_hash[-5:], new_txlist_hash[-5:], new_messages_hash[-5:], (' [overwrote %s]' % found_messages_hash) if found_messages_hash and found_messages_hash != new_messages_hash else '')) # Increment block index. block_count = backend.getblockcount() block_index += 1 else: # TODO: add zeromq support here to await TXs and Blocks instead of constantly polling # Get old mempool. old_mempool = list(cursor.execute('''SELECT * FROM mempool''')) old_mempool_hashes = [message['tx_hash'] for message in old_mempool] if backend.MEMPOOL_CACHE_INITIALIZED is False: backend.init_mempool_cache() logger.info("Ready for queries.") # Fake values for fake block. curr_time = int(time.time()) mempool_tx_index = tx_index xcp_mempool = [] raw_mempool = backend.getrawmempool() # this is a quick fix to make counterparty usable on high mempool situations # however, this makes the mempool unreliable on counterparty, a better, larger # fix must be done by changing this whole function into a zmq driven loop if len(raw_mempool) > config.MEMPOOL_TXCOUNT_UPDATE_LIMIT: continue # For each transaction in Bitcoin Core mempool, if it’s new, create # a fake block, a fake transaction, capture the generated messages, # and then save those messages. # Every transaction in mempool is parsed independently. (DB is rolled back after each one.) # We first filter out which transactions we've already parsed before so we can batch fetch their raw data parse_txs = [] for tx_hash in raw_mempool: # If already in mempool, copy to new one. if tx_hash in old_mempool_hashes: for message in old_mempool: if message['tx_hash'] == tx_hash: xcp_mempool.append((tx_hash, message)) # If not a supported XCP transaction, skip. elif tx_hash in not_supported: pass # Else: list, parse and save it. else: parse_txs.append(tx_hash) # fetch raw for all transactions that need to be parsed # Sometimes the transactions can’t be found: `{'code': -5, 'message': 'No information available about transaction'}` # - is txindex enabled in Bitcoind? # - or was there a block found while batch feting the raw txs # - or was there a double spend for w/e reason accepted into the mempool (replace-by-fee?) try: raw_transactions = backend.getrawtransaction_batch(parse_txs) except Exception as e: logger.warning('Failed to fetch raw for mempool TXs, restarting loop; %s', (e, )) continue # restart the follow loop for tx_hash in parse_txs: try: with db: # List the fake block. cursor.execute('''INSERT INTO blocks( block_index, block_hash, block_time) VALUES(?,?,?)''', (config.MEMPOOL_BLOCK_INDEX, config.MEMPOOL_BLOCK_HASH, curr_time) ) tx_hex = raw_transactions[tx_hash] if tx_hex is None: logger.debug('tx_hash %s not found in backend. Not adding to mempool.', (tx_hash, )) raise MempoolError mempool_tx_index = list_tx(db, None, block_index, curr_time, tx_hash, tx_index=mempool_tx_index, tx_hex=tx_hex) # Parse transaction. 
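# --- Illustrative aside (not part of the library) -----------------------------
# Each mempool transaction above is parsed inside a database transaction that is
# deliberately aborted: the fake block and fake transaction are written, the
# generated messages are copied out into Python memory, and then MempoolError is
# raised so nothing is committed.  The pattern, stripped to its essentials
# (apsw connections roll back when the `with` block exits on an exception):
class _Rollback(Exception):
    pass

def dry_run(db, apply_changes, capture_messages):
    captured = []
    try:
        with db:                            # opens a transaction
            apply_changes(db)               # e.g. list and parse the fake block
            captured = capture_messages(db) # read the messages it produced
            raise _Rollback                 # abort: no ledger change is committed
    except _Rollback:
        pass
    return captured
# -------------------------------------------------------------------------------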
cursor.execute('''SELECT * FROM transactions WHERE tx_hash = ?''', (tx_hash,)) transactions = list(cursor) if transactions: assert len(transactions) == 1 transaction = transactions[0] supported = parse_tx(db, transaction) if not supported: not_supported[tx_hash] = '' not_supported_sorted.append((block_index, tx_hash)) else: # If a transaction hasn’t been added to the # table `transactions`, then it’s not a # Counterparty transaction. not_supported[tx_hash] = '' not_supported_sorted.append((block_index, tx_hash)) raise MempoolError # Save transaction and side‐effects in memory. cursor.execute('''SELECT * FROM messages WHERE block_index = ?''', (config.MEMPOOL_BLOCK_INDEX,)) for message in list(cursor): xcp_mempool.append((tx_hash, message)) # Rollback. raise MempoolError except exceptions.ParseTransactionError as e: logger.warn('ParseTransactionError for tx %s: %s' % (tx_hash, e)) except MempoolError: pass # Re‐write mempool messages to database. with db: cursor.execute('''DELETE FROM mempool''') for message in xcp_mempool: tx_hash, new_message = message new_message['tx_hash'] = tx_hash cursor.execute('''INSERT INTO mempool VALUES(:tx_hash, :command, :category, :bindings, :timestamp)''', new_message) elapsed_time = time.time() - start_time sleep_time = config.BACKEND_POLL_INTERVAL - elapsed_time if elapsed_time <= config.BACKEND_POLL_INTERVAL else 0 logger.getChild('mempool').debug('Refresh mempool: %s XCP txs seen, out of %s total entries (took %ss, next refresh in %ss)' % ( len(xcp_mempool), len(raw_mempool), "{:.2f}".format(elapsed_time, 3), "{:.2f}".format(sleep_time, 3))) # Wait db.wal_checkpoint(mode=apsw.SQLITE_CHECKPOINT_PASSIVE) time.sleep(sleep_time) cursor.close() # vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4 #### check.py import json import requests import logging logger = logging.getLogger(__name__) import warnings import time import sys from counterpartylib.lib import config from counterpartylib.lib import util from counterpartylib.lib import exceptions from counterpartylib.lib import backend from counterpartylib.lib import database CONSENSUS_HASH_SEED = 'We can only see a short distance ahead, but we can see plenty there that needs to be done.' 
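# --- Illustrative aside (not part of the library) -----------------------------
# The seed above initialises the chained consensus hashes: in consensus_hash()
# further below, each block's ledger_hash / txlist_hash is the previous field
# value concatenated with the hash-version number and the block's content
# string, run through util.dhash_string.  Assuming dhash_string is the usual
# double-SHA256 hex digest, the chaining step looks like:
import hashlib

def dhash_string_sketch(text):
    return hashlib.sha256(hashlib.sha256(text.encode('utf-8')).digest()).hexdigest()

def next_consensus_hash(previous_hash, hash_version, content):
    return dhash_string_sketch(previous_hash + '{}{}'.format(hash_version, content))
# -------------------------------------------------------------------------------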
CONSENSUS_HASH_VERSION_MAINNET = 2 CHECKPOINTS_MAINNET = { config.BLOCK_FIRST_MAINNET: {'ledger_hash': '766ff0a9039521e3628a79fa669477ade241fc4c0ae541c3eae97f34b547b0b7', 'txlist_hash': '766ff0a9039521e3628a79fa669477ade241fc4c0ae541c3eae97f34b547b0b7'}, 280000: {'ledger_hash': '265719e2770d5a6994f6fe49839069183cd842ee14f56c2b870e56641e8a8725', 'txlist_hash': 'a59b33b4633649db4f14586af47e258ed9b8884dbb7aa308fb1f49a653ee60f4'}, 290000: {'ledger_hash': '4612ed7034474b4ff1727eb0e216d533ebe7ac755fb015e0f9a170c063f3e84c', 'txlist_hash': 'c15423c849fd360d38cbd6c6c3ea37a07fece723da92353f3056facc2676d9e7'}, 300000: {'ledger_hash': '9a3dd4949780404d61e5ca1929f94a43f08eb0fa19ccb4b5d6a61cafd7943199', 'txlist_hash': 'efa02dbdcc4158a598e3b476ece5ba9cc8d26f3abc8ac3777ac6dde0f0afc7e6'}, 310000: {'ledger_hash': '45e43d5cc77ea01129df01d7f55b0c89b2d4e18cd3d626fd92f30bfb37a85f4d', 'txlist_hash': '83cdcf75833d828ded09979b601fde87e2fdb0f5eb1cc6ab5d2042b7ec85f90e'}, 320000: {'ledger_hash': '91c1d33626669e8098bc762b1a9e3f616884e4d1cadda4881062c92b0d3d3e98', 'txlist_hash': '761793042d8e7c80e14a16c15bb9d40e237c468a87c207a59730b616bdfde7d4'}, 330000: {'ledger_hash': 'dd56aa97e5ca15841407f383ce1d7814536a594d7cfffcb4cf60bee8b362065a', 'txlist_hash': '3c45b4377a99e020550a198daa45c378c488a72ba199b53deb90b320d55a897b'}, 334000: {'ledger_hash': '24c4fa4097106031267439eb9fbe8ce2a18560169c67726652b608908c1ca9bb', 'txlist_hash': '764ca9e8d3b9546d1c4ff441a39594548989f60daefc6f28e046996e76a273bf'}, 335000: {'ledger_hash': 'e57c9d606a615e7e09bf99148596dd28e64b25cd8b081e226d535a64c1ed08d1', 'txlist_hash': '437d9507185b5e193627edf4998aad2264755af8d13dd3948ce119b32dd50ce2'}, 336000: {'ledger_hash': '1329ff5b80d034b64f6ea3481b7c7176437a8837b2a7cb7b8a265fdd1397572d', 'txlist_hash': '33eb8cacd4c750f8132d81e8e43ca13bd565f1734d7d182346364847414da52f'}, 337000: {'ledger_hash': '607e6a93e8d97cefea9bd55384898ee90c8477ded8a46017f2294feedbc83409', 'txlist_hash': '20b535a55abcc902ca70c19dd648cbe5149af8b4a4157b94f41b71fc422d428e'}, 338000: {'ledger_hash': 'f043914c71e4b711abb1c1002767b9a4e7d605e249facaaf7a2046b0e9741204', 'txlist_hash': 'fa2c3f7f76345278271ed5ec391d582858e10b1f154d9b44e5a1f4896400ee46'}, 339000: {'ledger_hash': '49f7240bc90ebc2f242dd599c7d2c427b9d2ac844992131e6e862b638ae4393a', 'txlist_hash': 'c1e3b497c054dcf67ddd0dc223e8b8a6e09a1a05bacb9fef5c03e48bd01e64e7'}, 340000: {'ledger_hash': '255760e2abfb79fdd76b65759f1590f582c1747f3eeccc4b2ae37d23e30e0729', 'txlist_hash': '8502004bb63e699b243ac8af072d704c69b817905e74787c2031af971e8cd87c'}, 341000: {'ledger_hash': '1369cba3909e564d2e725879a8b2cd987df075db121d1d421c8ce16b65f4bf04', 'txlist_hash': 'd217d0bed190cb27f58fcb96b255f8006bc4b9ed739e1bb08507201c49c426c8'}, 342000: {'ledger_hash': '9e7e9b8620717189ccea697ff2f84fe71bc4ae8d991481ff235164d72a9e6e4f', 'txlist_hash': 'adf75d023760101b2b337f6359dd811b12521c83837eb3f7db3bbfd0b095aa54'}, 343000: {'ledger_hash': 'aa47312ebe94b35504bec6c74713e404e5f36854e0836839344d13debe50558c', 'txlist_hash': '6bdbbc96364b3c92cea132fe66a0925f9445a249f7062326bdcc4ad4711f0c01'}, 344000: {'ledger_hash': '40187263aa96d1362bf7b19c8ba0fff7f0c0f3eb132a40fc90601b5926c7e6e3', 'txlist_hash': '98da8efe705c4b54275bfd25f816a7e7a4ff1f67647e17d7a0aaa2a3fef8bda0'}, 345000: {'ledger_hash': 'e4a1e1be4beea63d9740ca166b75bb4e3ffa2af33e1fe282e5b09c4952a7448c', 'txlist_hash': '777f163eaa5ad79dcb738871d4318a0699defec469d8afe91ab6277ff8d3e8b8'}, 350000: {'ledger_hash': '6a67e9f2e9d07e7bb3277cf9c24f84c857ed1b8fff4a37e589cd56ade276dd95', 'txlist_hash': 
'96bcbdbce74b782a845d4fda699846d2d3744044c2870a413c018642b8c7c3bf'}, 355000: {'ledger_hash': 'a84b17992217c7845e133a8597dac84eba1ee8c48bcc7f74bcf512837120f463', 'txlist_hash': '210d96b42644432b9e1a3433a29af9acb3bad212b67a7ae1dbc011a11b04bc24'}, 360000: {'ledger_hash': 'ddca07ea43b336b703fb8ebab6c0dc30582eb360d6f0eb0446e1fe58b53dee0a', 'txlist_hash': '31d0ff3e3782cf9464081829c5595b3de5ac477290dc069d98672f3f552767f8'}, 365000: {'ledger_hash': '2d55b126cca3eca15c07b5da683988f9e01d7346d2ca430e940fd7c07ce84fd7', 'txlist_hash': '7988a823cc1e3234953cc87d261d3c1fede8493d0a31b103357eb23cc7dc2eda'}, 366000: {'ledger_hash': '64ce274df2784f9ca88a8d7071613ec6527e506ec31cd434eca64c6a3345a6b7', 'txlist_hash': '0d4374da6100e279b24f4ba4a2d6afbfc4fb0fc2d312330a515806e8c5f49404'}, 370000: {'ledger_hash': 'fabb2a2e91fad3fe7734169d554cca396c1030243044cef42fcf65717cf0fa61', 'txlist_hash': '41d1732868c9ac25951ace5ca9f311a15d5eca9bf8d548e0d988c050bd2aff87'}, 375000: {'ledger_hash': 'a7ac4e2948cea0c426c8fc201cf57d9c313027ea7bff2b32a25ed28d3dbaa581', 'txlist_hash': '96118a7aa2ca753488755b7419a0f44a7fbc371bc58dcc7ab083c70fc14ef8b3'}, 380000: {'ledger_hash': '70453ba04c1c0198c4771e7964cffa25f9456c2f71456a8b05dfe935d5fcdc88', 'txlist_hash': '8bf2070103cca6f0bde507b7d20b0ba0630da6349beb560fa64c926d08dbcaef'}, 385000: {'ledger_hash': '93eb0a6e820bee197e7591edbc5ead7bfa38f32c88aabf4785f080fd6ae96c4c', 'txlist_hash': '1f8f17fd5766382a8c10a2a0e995a7d5a5d1bcd5fc0220d1e2691b2a94dcc78f'}, 390000: {'ledger_hash': '7d42b98eecbc910a67a5f4ac8dc7d6d9b6995ebc5bdf53663b414965fe7d2c5e', 'txlist_hash': 'b50efc4a4241bf3ec33a38c3b5f34756a9f305fe5fa9a80f7f9b70d5d7b2a780'}, 395000: {'ledger_hash': '89f9ac390b35e69dd75d6c34854ba501dce2f662fc707aee63cad5822c7660f2', 'txlist_hash': '2151dd2f0aa14685f3d041727a689d5d242578072a049123b317724fc4f1100c'}, 400000: {'ledger_hash': 'eb681a305125e04b6f044b36045e23ee248ce4eb68433cea2b36d15e7e74d5f1', 'txlist_hash': 'b48e9501e8d6f1f1b4127d868860885d3db76698c2c31a567777257df101cf61'}, 405000: {'ledger_hash': '3725055b37a8958ade6ca1c277cf50fee6036b4a92befb8da2f7c32f0b210881', 'txlist_hash': '871b2adfd246e3fe69f0fe9098e3251045ed6e9712c4cf90ea8dfdd1eb330ed6'}, 410000: {'ledger_hash': '1fa9a34f233695ebd7ebb08703bf8d99812fa099f297efc5d307d1ebef902ffd', 'txlist_hash': 'ee3bd84c728a37e2bbe061c1539c9ee6d71db18733b1ed53ee8d320481f55030'}, 415000: {'ledger_hash': '6772a8a1c784db14c0bf111e415919c9da4e5ca142be0b9e323c82c1b13c74e0', 'txlist_hash': 'cfb81785cd48e9ba0e54fee4d62f49b347489da82139fd5e1555ae0bc11a33d5'}, 420000: {'ledger_hash': '42167117e16943f44bb8117aa0a39bed2d863a454cd694d0bc5006a7aab23b06', 'txlist_hash': 'a1139870bef8eb9bbe60856029a4f01fce5432eb7aeacd088ba2e033757b86e3'}, } CONSENSUS_HASH_VERSION_TESTNET = 7 CHECKPOINTS_TESTNET = { config.BLOCK_FIRST_TESTNET: {'ledger_hash': '63f0fef31d02da85fa779e9a0e1b585b1a6a4e59e14564249e288e074e91c223', 'txlist_hash': '63f0fef31d02da85fa779e9a0e1b585b1a6a4e59e14564249e288e074e91c223'}, 316000: {'ledger_hash': 'f645e6877da416b8b91670ac927df686c5ea6fc1158c150ae49d594222ed504c', 'txlist_hash': '3e29bcbf3873326097024cc26e9296f0164f552dd79c2ee7cfc344e6d64fa87d'}, 319000: {'ledger_hash': '384ca28ac56976bc24a6ab7572b41bc61474e6b87fdee814135701d6a8f5c8a2', 'txlist_hash': '6c05c98418a6daa6de82dd59e000d3f3f5407c5432d4ab7d76047873a38e4d4b'}, 322000: {'ledger_hash': 'f4015c37eb4f31ac42083fd0389cde4868acb5353d3f3abfe2f3a88aba8cae72', 'txlist_hash': '18f278154e9bc3bbcc39da905ab4ad3023742ab7723b55b0fd1c58c36cd3e9bf'}, 325000: {'ledger_hash': 
'd7f70a927f5aeed38e559ddc0bc4697601477ea43cde928ad228fefc195b02da', 'txlist_hash': '1a60e38664b39e0f501b3e5a60c6fc0bd4ed311b74872922c2dde4cb2267fd3e'}, 329000: {'ledger_hash': '96637b4400cbe084c2c4f139f59b5bc16770815e96306423aaeb2b2677a9a657', 'txlist_hash': '79d577d8fbba0ad6ae67829dfa5824f758286ccd429d65b7d1d42989134d5b57'}, 350000: {'ledger_hash': 'cae8fec787bba3d5c968a8f4b6fb22a54c96d5acbeadd0425f6b20c3a8813ea3', 'txlist_hash': '097df9c3079df4d96f59518df72492dfd7a79716462e3a4a30d62a37aec6fc16'}, 400000: {'ledger_hash': '94abfd9c00c8462c155f64011e71af141b7d524e17de5aeda26b7469fe79b5f0', 'txlist_hash': 'a9fc42b69f80ec69f3f98e8a3cd81f4f946544fd0561a62a0891254c16970a87'}, 450000: {'ledger_hash': '09eb9f2aa605ce77225362b4b556284acdd9f6d3bc273372dfae4a5be9e9b035', 'txlist_hash': '05af651c1de49d0728834991e50000fbf2286d7928961b71917f682a0f2b7171'}, 500000: {'ledger_hash': '85f3bca8c88246ddfa1a5ec327e71f0696c182ed2a5fedf3712cd2e87e2661ac', 'txlist_hash': '663b34955116a96501e0c1c27f27d24bad7d45995913367553c5cfe4b8b9d0a9'}, 550000: {'ledger_hash': 'c143026133af2d83bc49ef205b4623194466ca3e7c79f95da2ad565359ccb5ad', 'txlist_hash': '097b8bca7a243e0b9bdf089f34de15bd2dcd4727fb4e88aae7bfd96302250326'}, 600000: {'ledger_hash': '82caf720967d0e43a1c49a6c75f255d9056ed1bffe3f96d962478faccdaba8ff', 'txlist_hash': '0d99f42184233426d70102d5ac3c80aaecf804d441a8a0d0ef26038d333ab7a7'}, 650000: {'ledger_hash': 'bef100ae7d5027a8b3f32416c4f26e1f16b21cee2a986c57be1466a3ba338051', 'txlist_hash': '409ed86e4274b511193d187df92e433c734dcc890bf93496e7a7dee770e7035e'}, 700000: {'ledger_hash': 'afe5e9c3f3a8c6f19c4f9feaf09df051c28202c6bae64f3563a09ffea9e79a6e', 'txlist_hash': '4f9765158855d24950c7e076615b0ad5b72738d4d579decfd3b93c998edf4fcb'}, 750000: {'ledger_hash': 'e7c7969a6156facb193b77ef71b5e3fac49c6998e5a94ec3b90292be10ece9cc', 'txlist_hash': '6e511790656d3ffec0c912d697e5d1c2a4e401a1606203c77ab5a5855891bc2c'}, 800000: {'ledger_hash': '42a7c679e51e5e8d38df26b67673b4850e8e6f72723aa19673b3219fcc02b77b', 'txlist_hash': '885ae1e6c21f5fb3645231aaa6bb6910fc21a0ae0ca5dbe9a4011f3b5295b3e7'}, 850000: {'ledger_hash': '35b2a2ab4a8bfbc321d4545292887b4ccaea73415c7674f795aefa6e240890eb', 'txlist_hash': '72d5cfe1e729a22da9eacd4d7752c881c43a191904556b65a0fae82b770dcdf3'}, 900000: {'ledger_hash': 'a5552b4998d2e5a516b9310d6592e7368771c1ad3b6e6330f6bc0baa3db31643', 'txlist_hash': '5a2e9fbd9b52ee32b8e8bfff993ed92dc22510aa7448277a704176cf01e55b04'}, 950000: {'ledger_hash': '5a5e78b55ac294690229abff7ff8f74f390f3a47dc4d08a0bac40e2e89a5bed2', 'txlist_hash': 'f4fa9838fb38d3e5beffb760fae022dcc59c61c506dd28ac83ee48ba814d04b2'}, 1000000: {'ledger_hash': 'eafca6700b9fd8f3992f8a18316e9ad59480ef74a4e7737793c101878aba8e1a', 'txlist_hash': '03deb626e031f30acd394bf49c35e11a487cb11e55dff5ba9a3f6d04b460c7de'}, 1050000: {'ledger_hash': '8012ebaf4c6638173e88ecd3e7bb2242ab88a9bdf877fc32c42dbcd7d2d3bab1', 'txlist_hash': '896274fdba957961083b07b80634126bc9f0434b67d723ed1fa83157ce5cd9a7'}, 1100000: {'ledger_hash': '76357f917235daa180c904cdf5c44366eef3e33539b7b0ba6a38f89582e82d22', 'txlist_hash': '36ecfd4b07f23176cd6960bc0adef97472c13793e53ac3df0eea0dd2e718a570'}, 1150000: {'ledger_hash': '5924f004bfdc3be449401c764808ebced542d2e06ba30c5984830292d1a926aa', 'txlist_hash': '9ff139dacf4b04293074e962153b972d25fa16d862dae05f7f3acc15e83c4fe8'}, 1200000: {'ledger_hash': 'a3d009bd2e0b838c185b8866233d7b4edaff87e5ec4cc4719578d1a8f9f8fe34', 'txlist_hash': '11dcf3a0ab714f05004a4e6c77fe425eb2a6427e4c98b7032412ab29363ffbb2'}, 1250000: {'ledger_hash': 
'37244453b4eac67d1dbfc0f60116cac90dab7b814d756653ad3d9a072fbac61a', 'txlist_hash': 'c01ed3113f8fd3a6b54f5cefafd842ebf7c314ce82922e36236414d820c5277a'}, 1300000: {'ledger_hash': 'a83c1cd582604130fd46f1304560caf0f4e3300f3ce7c3a89824b8901f13027f', 'txlist_hash': '67e663b75a80940941b8370ada4985be583edaa7ba454d49db9a864a7bb7979c'}, 1350000: {'ledger_hash': 'f96e6aff578896a4568fb69f72aa0a8b52eb9ebffefca4bd7368790341cd821d', 'txlist_hash': '83e7d31217af274b13889bd8b9f8f61afcd7996c2c8913e9b53b1d575f54b7c1'}, 1400000: {'ledger_hash': '85a23f6fee9ce9c80fa335729312183ff014920bbf297095ac77c4105fb67e17', 'txlist_hash': 'eee762f34a3f82e6332c58e0c256757d97ca308719323af78bf5924f08463e12'}, } CONSENSUS_HASH_VERSION_REGTEST = 1 CHECKPOINTS_REGTEST = { config.BLOCK_FIRST_REGTEST: {'ledger_hash': '33cf0669a0d309d7e6b1bf79494613b69262b58c0ea03c9c221d955eb4c84fe5', 'txlist_hash': '33cf0669a0d309d7e6b1bf79494613b69262b58c0ea03c9c221d955eb4c84fe5'}, } class ConsensusError(Exception): pass def consensus_hash(db, field, previous_consensus_hash, content): cursor = db.cursor() block_index = util.CURRENT_BLOCK_INDEX # Initialise previous hash on first block. if block_index <= config.BLOCK_FIRST: assert not previous_consensus_hash previous_consensus_hash = util.dhash_string(CONSENSUS_HASH_SEED) # Get previous hash. if not previous_consensus_hash: try: previous_consensus_hash = list(cursor.execute('''SELECT * FROM blocks WHERE block_index = ?''', (block_index - 1,)))[0][field] except IndexError: previous_consensus_hash = None if not previous_consensus_hash: raise ConsensusError('Empty previous {} for block {}. Please launch a `reparse`.'.format(field, block_index)) # Calculate current hash. if config.TESTNET: consensus_hash_version = CONSENSUS_HASH_VERSION_TESTNET elif config.REGTEST: consensus_hash_version = CONSENSUS_HASH_VERSION_REGTEST else: consensus_hash_version = CONSENSUS_HASH_VERSION_MAINNET calculated_hash = util.dhash_string(previous_consensus_hash + '{}{}'.format(consensus_hash_version, ''.join(content))) # Verify hash (if already in database) or save hash (if not). # NOTE: do not enforce this for messages_hashes, those are more informational (for now at least) found_hash = list(cursor.execute('''SELECT * FROM blocks WHERE block_index = ?''', (block_index,)))[0][field] or None if found_hash and field != 'messages_hash': # Check against existing value. if calculated_hash != found_hash: raise ConsensusError('Inconsistent {} for block {} (calculated {}, vs {} in database).'.format( field, block_index, calculated_hash, found_hash)) else: # Save new hash. cursor.execute('''UPDATE blocks SET {} = ? WHERE block_index = ?'''.format(field), (calculated_hash, block_index)) # Check against checkpoints. if config.TESTNET: checkpoints = CHECKPOINTS_TESTNET elif config.REGTEST: checkpoints = CHECKPOINTS_REGTEST else: checkpoints = CHECKPOINTS_MAINNET if field != 'messages_hash' and block_index in checkpoints and checkpoints[block_index][field] != calculated_hash: raise ConsensusError('Incorrect {} hash for block {}. 
Calculated {} but expected {}'.format(field, block_index, calculated_hash, checkpoints[block_index][field],)) return calculated_hash, found_hash class SanityError(Exception): pass def asset_conservation(db): logger.debug('Checking for conservation of assets.') supplies = util.supplies(db) held = util.held(db) for asset in supplies.keys(): asset_issued = supplies[asset] asset_held = held[asset] if asset in held and held[asset] != None else 0 if asset_issued != asset_held: raise SanityError('{} {} issued ≠ {} {} held'.format(util.value_out(db, asset_issued, asset), asset, util.value_out(db, asset_held, asset), asset)) logger.debug('{} has been conserved ({} {} both issued and held)'.format(asset, util.value_out(db, asset_issued, asset), asset)) class VersionError(Exception): pass class VersionUpdateRequiredError(VersionError): pass def check_change(protocol_change, change_name): # Check client version. passed = True if config.VERSION_MAJOR < protocol_change['minimum_version_major']: passed = False elif config.VERSION_MAJOR == protocol_change['minimum_version_major']: if config.VERSION_MINOR < protocol_change['minimum_version_minor']: passed = False elif config.VERSION_MINOR == protocol_change['minimum_version_minor']: if config.VERSION_REVISION < protocol_change['minimum_version_revision']: passed = False if not passed: explanation = 'Your version of {} is v{}, but, as of block {}, the minimum version is v{}.{}.{}. Reason: ‘{}’. Please upgrade to the latest version and restart the server.'.format( config.APP_NAME, config.VERSION_STRING, protocol_change['block_index'], protocol_change['minimum_version_major'], protocol_change['minimum_version_minor'], protocol_change['minimum_version_revision'], change_name) if util.CURRENT_BLOCK_INDEX >= protocol_change['block_index']: raise VersionUpdateRequiredError(explanation) else: warnings.warn(explanation) def software_version(): if config.FORCE: return logger.debug('Checking version.') try: host = 'https://counterpartyxcp.github.io/counterparty-lib/counterpartylib/protocol_changes.json' response = requests.get(host, headers={'cache-control': 'no-cache'}) versions = json.loads(response.text) except (requests.exceptions.ConnectionError, ConnectionRefusedError, ValueError) as e: logger.warning('Unable to check version! ' + str(sys.exc_info()[1])) return for change_name in versions: protocol_change = versions[change_name] try: check_change(protocol_change, change_name) except VersionUpdateRequiredError as e: logger.error("Version Update Required", exc_info=sys.exc_info()) sys.exit(config.EXITCODE_UPDATE_REQUIRED) logger.debug('Version check passed.') class DatabaseVersionError(Exception): def __init__(self, message, reparse_block_index): super(DatabaseVersionError, self).__init__(message) self.reparse_block_index = reparse_block_index def database_version(db): if config.FORCE: return logger.debug('Checking database version.') version_major, version_minor = database.version(db) if version_major != config.VERSION_MAJOR: # Rollback database if major version has changed. raise DatabaseVersionError('Client major version number mismatch ({} ≠ {}).'.format(version_major, config.VERSION_MAJOR), config.BLOCK_FIRST) elif version_minor != config.VERSION_MINOR: # Reparse all transactions if minor version has changed. 
raise DatabaseVersionError('Client minor version number mismatch ({} ≠ {}).'.format(version_minor, config.VERSION_MINOR), None) # vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4 #### config.py import sys """Variables prefixed with `DEFAULT` should be able to be overridden by configuration file and command‐line arguments.""" UNIT = 100000000 # The same across assets. # Versions VERSION_MAJOR = 9 VERSION_MINOR = 60 VERSION_REVISION = 1 VERSION_STRING = str(VERSION_MAJOR) + '.' + str(VERSION_MINOR) + '.' + str(VERSION_REVISION) # Counterparty protocol TXTYPE_FORMAT = '>I' SHORT_TXTYPE_FORMAT = 'B' TWO_WEEKS = 2 * 7 * 24 * 3600 MAX_EXPIRATION = 4 * 2016 # Two months MEMPOOL_BLOCK_HASH = 'mempool' MEMPOOL_BLOCK_INDEX = 9999999 # SQLite3 MAX_INT = 2**63 - 1 # Bitcoin Core OP_RETURN_MAX_SIZE = 80 # bytes # Currency agnosticism BTC = 'BTC' XCP = 'XCP' BTC_NAME = 'Bitcoin' XCP_NAME = 'Counterparty' APP_NAME = XCP_NAME.lower() DEFAULT_RPC_PORT_REGTEST = 24000 DEFAULT_RPC_PORT_TESTNET = 14000 DEFAULT_RPC_PORT = 4000 DEFAULT_BACKEND_PORT_REGTEST = 28332 DEFAULT_BACKEND_PORT_TESTNET = 18332 DEFAULT_BACKEND_PORT = 8332 DEFAULT_INDEXD_PORT_REGTEST = 28432 DEFAULT_INDEXD_PORT_TESTNET = 18432 DEFAULT_INDEXD_PORT = 8432 UNSPENDABLE_REGTEST = 'mvCounterpartyXXXXXXXXXXXXXXW24Hef' UNSPENDABLE_TESTNET = 'mvCounterpartyXXXXXXXXXXXXXXW24Hef' UNSPENDABLE_MAINNET = '1CounterpartyXXXXXXXXXXXXXXXUWLpVr' ADDRESSVERSION_TESTNET = b'\x6f' P2SH_ADDRESSVERSION_TESTNET = b'\xc4' PRIVATEKEY_VERSION_TESTNET = b'\xef' ADDRESSVERSION_MAINNET = b'\x00' P2SH_ADDRESSVERSION_MAINNET = b'\x05' PRIVATEKEY_VERSION_MAINNET = b'\x80' ADDRESSVERSION_REGTEST = b'\x6f' P2SH_ADDRESSVERSION_REGTEST = b'\xc4' PRIVATEKEY_VERSION_REGTEST = b'\xef' MAGIC_BYTES_TESTNET = b'\xfa\xbf\xb5\xda' # For bip-0010 MAGIC_BYTES_MAINNET = b'\xf9\xbe\xb4\xd9' # For bip-0010 MAGIC_BYTES_REGTEST = b'\xda\xb5\xbf\xfa' BLOCK_FIRST_TESTNET_TESTCOIN = 310000 BURN_START_TESTNET_TESTCOIN = 310000 BURN_END_TESTNET_TESTCOIN = 4017708 # Fifty years, at ten minutes per block. BLOCK_FIRST_TESTNET = 310000 BLOCK_FIRST_TESTNET_HASH = '000000001f605ec6ee8d2c0d21bf3d3ded0a31ca837acc98893876213828989d' BURN_START_TESTNET = 310000 BURN_END_TESTNET = 4017708 # Fifty years, at ten minutes per block. BLOCK_FIRST_MAINNET_TESTCOIN = 278270 BURN_START_MAINNET_TESTCOIN = 278310 BURN_END_MAINNET_TESTCOIN = 2500000 # A long time. 
BLOCK_FIRST_MAINNET = 278270 BLOCK_FIRST_MAINNET_HASH = '00000000000000017bac9a8e85660ad348050c789922d5f8fe544d473368be1a' BURN_START_MAINNET = 278310 BURN_END_MAINNET = 283810 BLOCK_FIRST_REGTEST = 0 BLOCK_FIRST_REGTEST_HASH = '0f9188f13cb7b2c71f2a335e3a4fc328bf5beb436012afca590b1a11466e2206' BURN_START_REGTEST = 101 BURN_END_REGTEST = 150000000 BLOCK_FIRST_REGTEST_TESTCOIN = 0 BURN_START_REGTEST_TESTCOIN = 101 BURN_END_REGTEST_TESTCOIN = 150 # Protocol defaults # NOTE: If the DUST_SIZE constants are changed, they MUST also be changed in counterblockd/lib/config.py as well DEFAULT_REGULAR_DUST_SIZE = 546 # TODO: Revisit when dust size is adjusted in bitcoin core DEFAULT_MULTISIG_DUST_SIZE = 7800 # DEFAULT_OP_RETURN_VALUE = 0 DEFAULT_FEE_PER_KB_ESTIMATE_SMART = 1024 DEFAULT_FEE_PER_KB = 25000 # sane/low default, also used as minimum when estimated fee is used ESTIMATE_FEE_PER_KB = True # when True will use `estimatesmartfee` from bitcoind instead of DEFAULT_FEE_PER_KB ESTIMATE_FEE_CONF_TARGET = 3 ESTIMATE_FEE_MODE = 'CONSERVATIVE' # UI defaults DEFAULT_FEE_FRACTION_REQUIRED = .009 # 0.90% DEFAULT_FEE_FRACTION_PROVIDED = .01 # 1.00% DEFAULT_REQUESTS_TIMEOUT = 20 # 20 seconds DEFAULT_RPC_BATCH_SIZE = 20 # A 1 MB block can hold about 4200 transactions. # Custom exit codes EXITCODE_UPDATE_REQUIRED = 5 DEFAULT_CHECK_ASSET_CONSERVATION = True BACKEND_RAW_TRANSACTIONS_CACHE_SIZE = 20000 BACKEND_RPC_BATCH_NUM_WORKERS = 6 UNDOLOG_MAX_PAST_BLOCKS = 100 #the number of past blocks that we store undolog history DEFAULT_UTXO_LOCKS_MAX_ADDRESSES = 1000 DEFAULT_UTXO_LOCKS_MAX_AGE = 3.0 #in seconds ADDRESS_OPTION_REQUIRE_MEMO = 1 ADDRESS_OPTION_MAX_VALUE = ADDRESS_OPTION_REQUIRE_MEMO # Or list of all the address options OLD_STYLE_API = True API_LIMIT_ROWS = 1000 MEMPOOL_TXCOUNT_UPDATE_LIMIT=60000 MPMA_LIMIT = 1000 # vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4 #### database.py import apsw import logging logger = logging.getLogger(__name__) import time import collections import copy from counterpartylib.lib import config from counterpartylib.lib import util from counterpartylib.lib import exceptions from counterpartylib.lib import log BLOCK_MESSAGES = [] def rowtracer(cursor, sql): """Converts fetched SQL data into dict-style""" dictionary = {} for index, (name, type_) in enumerate(cursor.getdescription()): dictionary[name] = sql[index] return dictionary def exectracer(cursor, sql, bindings): # This means that all changes to database must use a very simple syntax. # TODO: Need sanity checks here. sql = sql.lower() if sql.startswith('create trigger') or sql.startswith('drop trigger'): #CREATE TRIGGER stmts may include an "insert" or "update" as part of them return True # Parse SQL. array = sql.split('(')[0].split(' ') command = array[0] if 'insert' in sql: category = array[2] elif 'update' in sql: category = array[1] else: #CREATE TABLE, etc return True db = cursor.getconnection() dictionary = {'command': command, 'category': category, 'bindings': bindings} skip_tables = [ 'blocks', 'transactions', 'balances', 'messages', 'mempool', 'assets', 'new_sends', 'new_issuances' # interim table for CIP10 activation ] skip_tables_block_messages = copy.copy(skip_tables) if command == 'update': # List message manually. skip_tables += ['orders', 'bets', 'rps', 'order_matches', 'bet_matches', 'rps_matches'] # Record alteration in database. 
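In `exectracer()` above, the affected table (the `category`) sits in a different token position for INSERT and UPDATE statements. A small standalone sketch of that parsing, with a hypothetical helper name:

```python
# Hypothetical sketch of the statement parsing used by exectracer(): the table
# name is the third token of an INSERT and the second token of an UPDATE.
def parse_statement(sql):
    sql = sql.lower()
    array = sql.split('(')[0].split(' ')
    command = array[0]
    if 'insert' in sql:
        category = array[2]      # INSERT INTO <table> ...
    elif 'update' in sql:
        category = array[1]      # UPDATE <table> SET ...
    else:
        category = None          # CREATE TABLE, etc.
    return command, category

assert parse_statement('INSERT INTO sends VALUES (?, ?, ?)') == ('insert', 'sends')
assert parse_statement('UPDATE orders SET status = ? WHERE tx_hash = ?') == ('update', 'orders')
```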
if category not in skip_tables: log.message(db, bindings['block_index'], command, category, bindings) # Record alteration in computation of message feed hash for the block if category not in skip_tables_block_messages: # don't include asset_longname as part of the messages hash # until subassets are enabled if category == 'issuances' and not util.enabled('subassets'): if isinstance(bindings, dict) and 'asset_longname' in bindings: del bindings['asset_longname'] # don't include memo as part of the messages hash # until enhanced_sends are enabled if category == 'sends' and not util.enabled('enhanced_sends'): if isinstance(bindings, dict) and 'memo' in bindings: del bindings['memo'] sorted_bindings = sorted(bindings.items()) if isinstance(bindings, dict) else [bindings,] BLOCK_MESSAGES.append('{}{}{}'.format(command, category, sorted_bindings)) return True class DatabaseIntegrityError(exceptions.DatabaseError): pass def get_connection(read_only=True, foreign_keys=True, integrity_check=True): """Connects to the SQLite database, returning a db `Connection` object""" logger.debug('Creating connection to `{}`.'.format(config.DATABASE)) if read_only: db = apsw.Connection(config.DATABASE, flags=apsw.SQLITE_OPEN_READONLY) else: db = apsw.Connection(config.DATABASE) cursor = db.cursor() # For integrity, security. if foreign_keys and not read_only: logger.info('Checking database foreign keys...') cursor.execute('''PRAGMA foreign_keys = ON''') cursor.execute('''PRAGMA defer_foreign_keys = ON''') rows = list(cursor.execute('''PRAGMA foreign_key_check''')) if rows: for row in rows: logger.debug('Foreign Key Error: {}'.format(row)) raise exceptions.DatabaseError('Foreign key check failed.') # So that writers don’t block readers. cursor.execute('''PRAGMA journal_mode = WAL''') logger.info('Foreign key check completed.') # Make case sensitive the `LIKE` operator. # For insensitive queries use 'UPPER(fieldname) LIKE value.upper()'' cursor.execute('''PRAGMA case_sensitive_like = ON''') if integrity_check: logger.info('Checking database integrity...') integral = False for i in range(10): # DUPE try: cursor.execute('''PRAGMA integrity_check''') rows = cursor.fetchall() if not (len(rows) == 1 and rows[0][0] == 'ok'): raise exceptions.DatabaseError('Integrity check failed.') integral = True break except DatabaseIntegrityError: time.sleep(1) continue if not integral: raise exceptions.DatabaseError('Could not perform integrity check.') logger.info('Integrity check completed.') db.setrowtrace(rowtracer) db.setexectrace(exectracer) cursor.close() return db def version(db): cursor = db.cursor() user_version = cursor.execute('PRAGMA user_version').fetchall()[0]['user_version'] # manage old user_version if user_version == config.VERSION_MINOR: version_minor = user_version version_major = config.VERSION_MAJOR user_version = (config.VERSION_MAJOR * 1000) + version_minor cursor.execute('PRAGMA user_version = {}'.format(user_version)) else: version_minor = user_version % 1000 version_major = user_version // 1000 return version_major, version_minor def update_version(db): cursor = db.cursor() user_version = (config.VERSION_MAJOR * 1000) + config.VERSION_MINOR cursor.execute('PRAGMA user_version = {}'.format(user_version)) # Syntax?! logger.info('Database version number updated.') def vacuum(db): logger.info('Starting database VACUUM. This may take awhile...') cursor = db.cursor() cursor.execute('VACUUM') logger.info('Database VACUUM completed.') # vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4 #### exceptions.py #! 
/usr/bin/python3 class DatabaseError(Exception): pass class TransactionError(Exception): pass class ParseTransactionError(Exception): pass class AssetError (Exception): pass class AssetNameError(AssetError): pass class AssetIDError(AssetError): pass class MessageError(Exception): pass class ComposeError(MessageError): pass class UnpackError(MessageError): pass class ValidateError(MessageError): pass class DecodeError(MessageError): pass class PushDataDecodeError(DecodeError): pass class BTCOnlyError(MessageError): def __init__(self, msg, decodedTx=None): super(BTCOnlyError, self).__init__(msg) self.decodedTx = decodedTx class BalanceError(Exception): pass class EncodingError(Exception): pass class OptionsError(Exception): pass # vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4 #### log.py import logging logger = logging.getLogger(__name__) import decimal D = decimal.Decimal import binascii import collections import json import time from datetime import datetime from dateutil.tz import tzlocal import os from colorlog import ColoredFormatter from counterpartylib.lib import config from counterpartylib.lib import exceptions from counterpartylib.lib import util class ModuleLoggingFilter(logging.Filter): """ module level logging filter (NodeJS-style), ie: filters="*,-counterpartylib.lib,counterpartylib.lib.api" will log: - counterpartycli.server - counterpartylib.lib.api but will not log: - counterpartylib.lib - counterpartylib.lib.backend.indexd """ def __init__(self, filters): self.filters = str(filters).split(",") self.catchall = "*" in self.filters if self.catchall: self.filters.remove("*") def filter(self, record): """ Determine if specified record should be logged or not """ result = None for filter in self.filters: if filter[:1] == "-": if result is None and ModuleLoggingFilter.ismatch(record, filter[1:]): result = False else: if ModuleLoggingFilter.ismatch(record, filter): result = True if result is None: return self.catchall return result @classmethod def ismatch(cls, record, name): """ Determine if the specified record matches the name, in the same way as original logging.Filter does, ie: 'counterpartylib.lib' will match 'counterpartylib.lib.check' """ nlen = len(name) if nlen == 0: return True elif name == record.name: return True elif record.name.find(name, 0, nlen) != 0: return False return record.name[nlen] == "." 
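A short usage sketch of the NodeJS-style filter string handled by `ModuleLoggingFilter` (illustrative only; it assumes the class is imported from `counterpartylib.lib.log`):

```python
# Illustrative usage of ModuleLoggingFilter; the record names follow the
# docstring example above.
import logging

flt = ModuleLoggingFilter("*,-counterpartylib.lib,counterpartylib.lib.api")

def allowed(name):
    record = logging.LogRecord(name, logging.INFO, __file__, 0, "msg", None, None)
    return flt.filter(record)

assert allowed("counterpartycli.server")                   # caught by "*"
assert allowed("counterpartylib.lib.api")                  # explicitly re-enabled
assert not allowed("counterpartylib.lib.backend.indexd")   # excluded by "-counterpartylib.lib"
```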
ROOT_LOGGER = None def set_logger(logger): global ROOT_LOGGER if ROOT_LOGGER is None: ROOT_LOGGER = logger LOGGING_SETUP = False LOGGING_TOFILE_SETUP = False def set_up(logger, verbose=False, logfile=None, console_logfilter=None): global LOGGING_SETUP global LOGGING_TOFILE_SETUP def set_up_file_logging(): assert logfile max_log_size = 20 * 1024 * 1024 # 20 MB if os.name == 'nt': from counterpartylib.lib import util_windows fileh = util_windows.SanitizedRotatingFileHandler(logfile, maxBytes=max_log_size, backupCount=5) else: fileh = logging.handlers.RotatingFileHandler(logfile, maxBytes=max_log_size, backupCount=5) fileh.setLevel(logging.DEBUG) LOGFORMAT = '%(asctime)s [%(levelname)s] %(message)s' formatter = logging.Formatter(LOGFORMAT, '%Y-%m-%d-T%H:%M:%S%z') fileh.setFormatter(formatter) logger.addHandler(fileh) if LOGGING_SETUP: if logfile and not LOGGING_TOFILE_SETUP: set_up_file_logging() LOGGING_TOFILE_SETUP = True logger.getChild('log.set_up').debug('logging already setup') return LOGGING_SETUP = True log_level = logging.DEBUG if verbose else logging.INFO logger.setLevel(log_level) # Console Logging console = logging.StreamHandler() console.setLevel(log_level) # only add [%(name)s] to LOGFORMAT if we're using console_logfilter LOGFORMAT = '%(log_color)s[%(asctime)s][%(levelname)s]' + ('' if console_logfilter is None else '[%(name)s]') + ' %(message)s%(reset)s' LOGCOLORS = {'WARNING': 'yellow', 'ERROR': 'red', 'CRITICAL': 'red'} formatter = ColoredFormatter(LOGFORMAT, "%Y-%m-%d %H:%M:%S", log_colors=LOGCOLORS) console.setFormatter(formatter) logger.addHandler(console) if console_logfilter: console.addFilter(ModuleLoggingFilter(console_logfilter)) # File Logging if logfile: set_up_file_logging() LOGGING_TOFILE_SETUP = True # Quieten noisy libraries. requests_log = logging.getLogger("requests") requests_log.setLevel(log_level) requests_log.propagate = False urllib3_log = logging.getLogger('urllib3') urllib3_log.setLevel(log_level) urllib3_log.propagate = False # Disable InsecureRequestWarning import requests requests.packages.urllib3.disable_warnings() def curr_time(): return int(time.time()) def isodt (epoch_time): try: return datetime.fromtimestamp(epoch_time, tzlocal()).isoformat() except OSError: return '' def message(db, block_index, command, category, bindings, tx_hash=None): cursor = db.cursor() # Get last message index. messages = list(cursor.execute('''SELECT * FROM messages WHERE message_index = (SELECT MAX(message_index) from messages)''')) if messages: assert len(messages) == 1 message_index = messages[0]['message_index'] + 1 else: message_index = 0 # Not to be misleading… if block_index == config.MEMPOOL_BLOCK_INDEX: try: del bindings['status'] del bindings['block_index'] del bindings['tx_index'] except KeyError: pass # Handle binary data. items = [] for item in sorted(bindings.items()): if type(item[1]) == bytes: items.append((item[0], binascii.hexlify(item[1]).decode('ascii'))) else: items.append(item) bindings_string = json.dumps(collections.OrderedDict(items)) cursor.execute('insert into messages values(:message_index, :block_index, :command, :category, :bindings, :timestamp)', (message_index, block_index, command, category, bindings_string, curr_time())) # Log only real transactions. if block_index != config.MEMPOOL_BLOCK_INDEX: log(db, command, category, bindings) cursor.close() def log (db, command, category, bindings): cursor = db.cursor() for element in bindings.keys(): try: str(bindings[element]) except KeyError: bindings[element] = '' # Slow?! 
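For reference, the bindings serialization done in `message()` above can be reproduced in isolation: bytes values are hex-encoded and keys are sorted, so the JSON stored in the `messages` table is deterministic. A standalone sketch with made-up bindings:

```python
# Standalone sketch of the bindings serialization in message(): hex-encode
# bytes values, sort keys, and dump as JSON (the example bindings are made up).
import binascii
import collections
import json

bindings = {'tx_hash': b'\xde\xad\xbe\xef', 'quantity': 100, 'asset': 'XCP'}
items = []
for key, value in sorted(bindings.items()):
    if isinstance(value, bytes):
        items.append((key, binascii.hexlify(value).decode('ascii')))
    else:
        items.append((key, value))

bindings_string = json.dumps(collections.OrderedDict(items))
assert bindings_string == '{"asset": "XCP", "quantity": 100, "tx_hash": "deadbeef"}'
```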
def output (quantity, asset): try: if asset not in ('fraction', 'leverage'): return str(util.value_out(db, quantity, asset)) + ' ' + asset else: return str(util.value_out(db, quantity, asset)) except exceptions.AssetError: return '' except decimal.DivisionByZero: return '' except TypeError: return '' if command == 'update': if category == 'order': logger.debug('Database: set status of order {} to {}.'.format(bindings['tx_hash'], bindings['status'])) elif category == 'bet': logger.debug('Database: set status of bet {} to {}.'.format(bindings['tx_hash'], bindings['status'])) elif category == 'order_matches': logger.debug('Database: set status of order_match {} to {}.'.format(bindings['order_match_id'], bindings['status'])) elif category == 'bet_matches': logger.debug('Database: set status of bet_match {} to {}.'.format(bindings['bet_match_id'], bindings['status'])) elif category == 'dispensers': escrow_quantity = '' divisible = get_asset_info(cursor, bindings['asset'])['divisible'] if divisible: if "escrow_quantity" in bindings: escrow_quantity = "{:.8f}".format(bindings["escrow_quantity"]/config.UNIT) if ("action" in bindings) and bindings["action"] == 'refill dispenser': logger.info("Dispenser: {} refilled a dispenser with {} {}".format(bindings["source"],escrow_quantity,bindings["asset"])) elif "prev_status" in bindings: #There was a dispense if bindings["prev_status"] == 0: if bindings["status"] == 10: logger.info("Dispenser: {} closed dispenser for {} (dispenser empty)".format(bindings["source"],bindings["asset"])) elif bindings["status"] == 10: #Address closed the dispenser logger.info("Dispenser: {} closed dispenser for {} (operator closed)".format(bindings["source"],bindings["asset"])) # TODO: elif category == 'balances': # logger.debug('Database: set balance of {} in {} to {}.'.format(bindings['address'], bindings['asset'], output(bindings['quantity'], bindings['asset']).split(' ')[0])) elif command == 'insert': if category == 'credits': logger.debug('Credit: {} to {} #{}# <{}>'.format(output(bindings['quantity'], bindings['asset']), bindings['address'], bindings['action'], bindings['event'])) elif category == 'debits': logger.debug('Debit: {} from {} #{}# <{}>'.format(output(bindings['quantity'], bindings['asset']), bindings['address'], bindings['action'], bindings['event'])) elif category == 'sends': logger.info('Send: {} from {} to {} ({}) [{}]'.format(output(bindings['quantity'], bindings['asset']), bindings['source'], bindings['destination'], bindings['tx_hash'], bindings['status'])) elif category == 'orders': logger.info('Order: {} ordered {} for {} in {} blocks, with a provided fee of {:.8f} {} and a required fee of {:.8f} {} ({}) [{}]'.format(bindings['source'], output(bindings['give_quantity'], bindings['give_asset']), output(bindings['get_quantity'], bindings['get_asset']), bindings['expiration'], bindings['fee_provided'] / config.UNIT, config.BTC, bindings['fee_required'] / config.UNIT, config.BTC, bindings['tx_hash'], bindings['status'])) elif category == 'order_matches': logger.info('Order Match: {} for {} ({}) [{}]'.format(output(bindings['forward_quantity'], bindings['forward_asset']), output(bindings['backward_quantity'], bindings['backward_asset']), bindings['id'], bindings['status'])) elif category == 'btcpays': logger.info('{} Payment: {} paid {} to {} for order match {} ({}) [{}]'.format(config.BTC, bindings['source'], output(bindings['btc_amount'], config.BTC), bindings['destination'], bindings['order_match_id'], bindings['tx_hash'], bindings['status'])) elif 
category == 'issuances': if (get_asset_issuances_quantity(cursor, bindings["asset"]) == 0) or (bindings['quantity'] > 0): #This is the first issuance or the creation of more supply, so we have to log the creation of the token if bindings['divisible']: divisibility = 'divisible' unit = config.UNIT else: divisibility = 'indivisible' unit = 1 try: quantity = util.value_out(cursor, bindings['quantity'], None, divisible=bindings['divisible']) except Exception as e: quantity = '?' if 'asset_longname' in bindings and bindings['asset_longname'] is not None: logger.info('Subasset Issuance: {} created {} of {} subasset {} as numeric asset {} ({}) [{}]'.format(bindings['source'], quantity, divisibility, bindings['asset_longname'], bindings['asset'], bindings['tx_hash'], bindings['status'])) else: logger.info('Issuance: {} created {} of {} asset {} ({}) [{}]'.format(bindings['source'], quantity, divisibility, bindings['asset'], bindings['tx_hash'], bindings['status'])) if bindings['locked']: lock_issuance = get_lock_issuance(cursor, bindings["asset"]) if (lock_issuance == None) or (lock_issuance['tx_hash'] == bindings['tx_hash']): logger.info('Issuance: {} locked asset {} ({}) [{}]'.format(bindings['source'], bindings['asset'], bindings['tx_hash'], bindings['status'])) if bindings['transfer']: logger.info('Issuance: {} transfered asset {} to {} ({}) [{}]'.format(bindings['source'], bindings['asset'], bindings['issuer'], bindings['tx_hash'], bindings['status'])) elif category == 'broadcasts': if bindings['locked']: logger.info('Broadcast: {} locked his feed ({}) [{}]'.format(bindings['source'], bindings['tx_hash'], bindings['status'])) else: logger.info('Broadcast: ' + bindings['source'] + ' at ' + isodt(bindings['timestamp']) + ' with a fee of {}%'.format(output(D(bindings['fee_fraction_int'] / 1e8), 'fraction')) + ' (' + bindings['tx_hash'] + ')' + ' [{}]'.format(bindings['status'])) elif category == 'bets': logger.info('Bet: {} against {}, by {}, on {}'.format(output(bindings['wager_quantity'], config.XCP), output(bindings['counterwager_quantity'], config.XCP), bindings['source'], bindings['feed_address'])) elif category == 'bet_matches': placeholder = '' if bindings['target_value'] >= 0: # Only non‐negative values are valid. 
placeholder = ' that ' + str(output(bindings['target_value'], 'value')) if bindings['leverage']: placeholder += ', leveraged {}x'.format(output(bindings['leverage'] / 5040, 'leverage')) logger.info('Bet Match: {} for {} against {} for {} on {} at {}{} ({}) [{}]'.format(util.BET_TYPE_NAME[bindings['tx0_bet_type']], output(bindings['forward_quantity'], config.XCP), util.BET_TYPE_NAME[bindings['tx1_bet_type']], output(bindings['backward_quantity'], config.XCP), bindings['feed_address'], isodt(bindings['deadline']), placeholder, bindings['id'], bindings['status'])) elif category == 'dividends': logger.info('Dividend: {} paid {} per unit of {} ({}) [{}]'.format(bindings['source'], output(bindings['quantity_per_unit'], bindings['dividend_asset']), bindings['asset'], bindings['tx_hash'], bindings['status'])) elif category == 'burns': logger.info('Burn: {} burned {} for {} ({}) [{}]'.format(bindings['source'], output(bindings['burned'], config.BTC), output(bindings['earned'], config.XCP), bindings['tx_hash'], bindings['status'])) elif category == 'cancels': logger.info('Cancel: {} ({}) [{}]'.format(bindings['offer_hash'], bindings['tx_hash'], bindings['status'])) elif category == 'rps': log_message = 'RPS: {} opens game with {} possible moves and a wager of {}'.format(bindings['source'], bindings['possible_moves'], output(bindings['wager'], 'XCP')) logger.info(log_message) elif category == 'rps_matches': log_message = 'RPS Match: {} is playing a {}-moves game with {} with a wager of {} ({}) [{}]'.format(bindings['tx0_address'], bindings['possible_moves'], bindings['tx1_address'], output(bindings['wager'], 'XCP'), bindings['id'], bindings['status']) logger.info(log_message) elif category == 'rpsresolves': if bindings['status'] == 'valid': rps_matches = list(cursor.execute('''SELECT * FROM rps_matches WHERE id = ?''', (bindings['rps_match_id'],))) assert len(rps_matches) == 1 rps_match = rps_matches[0] log_message = 'RPS Resolved: {} is playing {} on a {}-moves game with {} with a wager of {} ({}) [{}]'.format(rps_match['tx0_address'], bindings['move'], rps_match['possible_moves'], rps_match['tx1_address'], output(rps_match['wager'], 'XCP'), rps_match['id'], rps_match['status']) else: log_message = 'RPS Resolved: {} [{}]'.format(bindings['tx_hash'], bindings['status']) logger.info(log_message) elif category == 'order_expirations': logger.info('Expired order: {}'.format(bindings['order_hash'])) elif category == 'order_match_expirations': logger.info('Expired Order Match awaiting payment: {}'.format(bindings['order_match_id'])) elif category == 'bet_expirations': logger.info('Expired bet: {}'.format(bindings['bet_hash'])) elif category == 'bet_match_expirations': logger.info('Expired Bet Match: {}'.format(bindings['bet_match_id'])) elif category == 'bet_match_resolutions': # DUPE cfd_type_id = util.BET_TYPE_ID['BullCFD'] + util.BET_TYPE_ID['BearCFD'] equal_type_id = util.BET_TYPE_ID['Equal'] + util.BET_TYPE_ID['NotEqual'] if bindings['bet_match_type_id'] == cfd_type_id: if bindings['settled']: logger.info('Bet Match Settled: {} credited to the bull, {} credited to the bear, and {} credited to the feed address ({})'.format(output(bindings['bull_credit'], config.XCP), output(bindings['bear_credit'], config.XCP), output(bindings['fee'], config.XCP), bindings['bet_match_id'])) else: logger.info('Bet Match Force‐Liquidated: {} credited to the bull, {} credited to the bear, and {} credited to the feed address ({})'.format(output(bindings['bull_credit'], config.XCP), output(bindings['bear_credit'], 
config.XCP), output(bindings['fee'], config.XCP), bindings['bet_match_id'])) elif bindings['bet_match_type_id'] == equal_type_id: logger.info('Bet Match Settled: {} won the pot of {}; {} credited to the feed address ({})'.format(bindings['winner'], output(bindings['escrow_less_fee'], config.XCP), output(bindings['fee'], config.XCP), bindings['bet_match_id'])) elif category == 'rps_expirations': logger.info('Expired RPS: {}'.format(bindings['rps_hash'])) elif category == 'rps_match_expirations': logger.info('Expired RPS Match: {}'.format(bindings['rps_match_id'])) elif category == 'destructions': asset_info = get_asset_info(cursor, bindings['asset']) quantity = bindings['quantity'] if asset_info['divisible']: quantity = "{:.8f}".format(quantity/config.UNIT) logger.info('Destruction: {} destroyed {} {} with tag ‘{}’({}) [{}]'.format(bindings['source'], quantity, bindings['asset'], bindings['tag'], bindings['tx_hash'], bindings['status'])) elif category == 'dispensers': each_price = bindings['satoshirate'] currency = config.BTC dispenser_label = 'dispenser' escrow_quantity = bindings['escrow_quantity'] give_quantity = bindings['give_quantity'] if (bindings['oracle_address'] != None) and util.enabled('oracle_dispensers'): each_price = "{:.2f}".format(each_price/100.0) oracle_last_price, oracle_fee, currency, oracle_last_updated = util.get_oracle_last_price(db, bindings['oracle_address'], bindings['block_index']) dispenser_label = 'oracle dispenser using {}'.format(bindings['oracle_address']) else: each_price = "{:.8f}".format(each_price/config.UNIT) divisible = get_asset_info(cursor, bindings['asset'])['divisible'] if divisible: escrow_quantity = "{:.8f}".format(escrow_quantity/config.UNIT) give_quantity = "{:.8f}".format(give_quantity/config.UNIT) if bindings['status'] == 0: logger.info('Dispenser: {} opened a {} for asset {} with {} balance, giving {} {} for each {} {}'.format(bindings['source'], dispenser_label, bindings['asset'], escrow_quantity, give_quantity, bindings['asset'], each_price, currency)) elif bindings['status'] == 1: logger.info('Dispenser: {} (empty address) opened a {} for asset {} with {} balance, giving {} {} for each {} {}'.format(bindings['source'], dispenser_label, bindings['asset'], escrow_quantity, give_quantity, bindings['asset'], each_price, currency)) elif bindings['status'] == 10: logger.info('Dispenser: {} closed a {} for asset {}'.format(bindings['source'], dispenser_label, bindings['asset'])) elif category == 'dispenses': cursor.execute('SELECT * FROM dispensers WHERE tx_hash=:tx_hash', { 'tx_hash': bindings['dispenser_tx_hash'] }) dispensers = cursor.fetchall() dispenser = dispensers[0] if (dispenser["oracle_address"] != None) and util.enabled('oracle_dispensers'): tx_btc_amount = get_tx_info(cursor, bindings['tx_hash'])/config.UNIT oracle_last_price, oracle_fee, oracle_fiat_label, oracle_last_price_updated = util.get_oracle_last_price(db, dispenser["oracle_address"], bindings['block_index']) fiatpaid = round(tx_btc_amount*oracle_last_price,2) logger.info('Dispense: {} from {} to {} for {:.8f} {} ({} {}) ({})'.format(output(bindings['dispense_quantity'], bindings['asset']), bindings['source'], bindings['destination'], tx_btc_amount, config.BTC, fiatpaid, oracle_fiat_label, bindings['tx_hash'])) else: logger.info('Dispense: {} from {} to {} ({})'.format(output(bindings['dispense_quantity'], bindings['asset']), bindings['source'], bindings['destination'], bindings['tx_hash'])) cursor.close() def get_lock_issuance(cursor, asset): cursor.execute('''SELECT * FROM 
issuances \ WHERE (status = ? AND asset = ? AND locked = ?) ORDER BY tx_index ASC''', ('valid', asset, True)) issuances = cursor.fetchall() if len(issuances) > 0: return issuances[0] return None def get_asset_issuances_quantity(cursor, asset): cursor.execute('''SELECT COUNT(*) AS issuances_count FROM issuances \ WHERE (status = ? AND asset = ?) ORDER BY tx_index DESC''', ('valid', asset)) issuances = cursor.fetchall() return issuances[0]['issuances_count'] def get_asset_info(cursor, asset): if asset == config.BTC or asset == config.XCP: return {'divisible':True} cursor.execute('''SELECT * FROM issuances \ WHERE (status = ? AND asset = ?) ORDER BY tx_index DESC''', ('valid', asset)) issuances = cursor.fetchall() return issuances[0] def get_tx_info(cursor, tx_hash): cursor.execute('SELECT * FROM transactions WHERE tx_hash=:tx_hash', { 'tx_hash': tx_hash }) transactions = cursor.fetchall() transaction = transactions[0] return transaction["btc_amount"] # vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4 #### message_type.py import logging logger = logging.getLogger(__name__) import struct from counterpartylib.lib import config from counterpartylib.lib import util def pack(message_type_id, block_index=None): # pack message ID into 1 byte if not zero if util.enabled('short_tx_type_id', block_index) and message_type_id > 0 and message_type_id < 256: return struct.pack(config.SHORT_TXTYPE_FORMAT, message_type_id) # pack into 4 bytes return struct.pack(config.TXTYPE_FORMAT, message_type_id) # retuns both the message type id and the remainder of the message data def unpack(packed_data, block_index=None): message_type_id = None message_remainder = None if len(packed_data) > 1: # try to read 1 byte first if util.enabled('short_tx_type_id', block_index): message_type_id = struct.unpack(config.SHORT_TXTYPE_FORMAT, packed_data[:1])[0] if message_type_id > 0: message_remainder = packed_data[1:] return (message_type_id, message_remainder) # First message byte was 0. We will read 4 bytes if len(packed_data) > 4: message_type_id = struct.unpack(config.TXTYPE_FORMAT, packed_data[:4])[0] message_remainder = packed_data[4:] return (message_type_id, message_remainder) #### script.py """ None of the functions/objects in this module need be passed `db`. Naming convention: a `pub` is either a pubkey or a pubkeyhash """ import hashlib import bitcoin as bitcoinlib import binascii from bitcoin.core.key import CPubKey from bitcoin.bech32 import CBech32Data from counterpartylib.lib import util from counterpartylib.lib import config from counterpartylib.lib import exceptions b58_digits = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz' class InputError (Exception): pass class AddressError(Exception): pass class MultiSigAddressError(AddressError): pass class VersionByteError (AddressError): pass class Base58Error (AddressError): pass class Base58ChecksumError (Base58Error): pass def validate(address, allow_p2sh=True): """Make sure the address is valid. May throw `AddressError`. """ # Get array of pubkeyhashes to check. if is_multisig(address): pubkeyhashes = pubkeyhash_array(address) else: pubkeyhashes = [address] # Check validity by attempting to decode. 
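The short/long framing implemented by `message_type.pack()` and `unpack()` above can be shown in isolation: ids 1–255 fit in a single byte (`SHORT_TXTYPE_FORMAT`) once `short_tx_type_id` is active, while a leading zero byte signals the 4-byte big-endian form (`TXTYPE_FORMAT`). An illustrative sketch:

```python
# Illustrative framing sketch using the formats defined in config.py:
# SHORT_TXTYPE_FORMAT = 'B' (one byte) and TXTYPE_FORMAT = '>I' (four bytes).
import struct

SHORT_TXTYPE_FORMAT = 'B'
TXTYPE_FORMAT = '>I'

payload = b'payload'
short = struct.pack(SHORT_TXTYPE_FORMAT, 2) + payload   # short form, id 1-255
long_ = struct.pack(TXTYPE_FORMAT, 2) + payload         # long form, leading zero byte

assert short[0] != 0 and struct.unpack(SHORT_TXTYPE_FORMAT, short[:1])[0] == 2
assert long_[0] == 0 and struct.unpack(TXTYPE_FORMAT, long_[:4])[0] == 2
assert short[1:] == long_[4:] == payload
```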
for pubkeyhash in pubkeyhashes: try: if util.enabled('segwit_support'): if not is_bech32(pubkeyhash): base58_check_decode(pubkeyhash, config.ADDRESSVERSION) else: base58_check_decode(pubkeyhash, config.ADDRESSVERSION) except VersionByteError as e: if not allow_p2sh: raise e base58_check_decode(pubkeyhash, config.P2SH_ADDRESSVERSION) except Base58Error as e: if not util.enabled('segwit_support') or not is_bech32(pubkeyhash): raise e def base58_encode(binary): """Encode the address in base58.""" # Convert big‐endian bytes to integer n = int('0x0' + util.hexlify(binary), 16) # Divide that integer into base58 res = [] while n > 0: n, r = divmod(n, 58) res.append(b58_digits[r]) res = ''.join(res[::-1]) return res def base58_check_encode(original, version): """Check if base58 encoding is valid.""" b = binascii.unhexlify(bytes(original, 'utf-8')) d = version + b binary = d + util.dhash(d)[:4] res = base58_encode(binary) # Encode leading zeros as base58 zeros czero = 0 pad = 0 for c in d: if c == czero: pad += 1 else: break address = b58_digits[0] * pad + res if original != util.hexlify(base58_check_decode(address, version)): raise AddressError('encoded address does not decode properly') return address def base58_decode(s): # Convert the string to an integer n = 0 for c in s: n *= 58 if c not in b58_digits: raise Base58Error('Not a valid Base58 character: ‘{}’'.format(c)) digit = b58_digits.index(c) n += digit # Convert the integer to bytes h = '%x' % n if len(h) % 2: h = '0' + h res = binascii.unhexlify(h.encode('utf8')) # Add padding back. pad = 0 for c in s[:-1]: if c == b58_digits[0]: pad += 1 else: break k = b'\x00' * pad + res return k def base58_check_decode_parts(s): """Decode from base58 and return parts.""" k = base58_decode(s) addrbyte, data, chk0 = k[0:1], k[1:-4], k[-4:] return addrbyte, data, chk0 def base58_check_decode(s, version): """Decode from base58 and return data part.""" addrbyte, data, chk0 = base58_check_decode_parts(s) if addrbyte != version: raise VersionByteError('incorrect version byte') chk1 = util.dhash(addrbyte + data)[:4] if chk0 != chk1: raise Base58ChecksumError('Checksum mismatch: 0x{} ≠ 0x{}'.format(util.hexlify(chk0), util.hexlify(chk1))) return data def is_multisig(address): """Check if the address is multi‐signature.""" array = address.split('_') return len(array) > 1 def is_p2sh(address): if is_multisig(address): return False try: base58_check_decode(address, config.P2SH_ADDRESSVERSION) return True except (VersionByteError, Base58Error): return False def is_bech32(address): try: b32data = CBech32Data(address) return True except: return False def is_fully_valid(pubkey_bin): """Check if the public key is valid.""" cpubkey = CPubKey(pubkey_bin) return cpubkey.is_fullyvalid def make_canonical(address): """Return canonical version of the address.""" if is_multisig(address): signatures_required, pubkeyhashes, signatures_possible = extract_array(address) try: [base58_check_decode(pubkeyhash, config.ADDRESSVERSION) for pubkeyhash in pubkeyhashes] except Base58Error: raise MultiSigAddressError('Multi‐signature address must use PubKeyHashes, not public keys.') return construct_array(signatures_required, pubkeyhashes, signatures_possible) else: return address def test_array(signatures_required, pubs, signatures_possible): """Check if multi‐signature data is valid.""" try: signatures_required, signatures_possible = int(signatures_required), int(signatures_possible) except (ValueError, TypeError): raise MultiSigAddressError('Signature values not integers.') if 
signatures_required < 1 or signatures_required > 3: raise MultiSigAddressError('Invalid signatures_required.') if signatures_possible < 2 or signatures_possible > 3: raise MultiSigAddressError('Invalid signatures_possible.') for pubkey in pubs: if '_' in pubkey: raise MultiSigAddressError('Invalid characters in pubkeys/pubkeyhashes.') if signatures_possible != len(pubs): raise InputError('Incorrect number of pubkeys/pubkeyhashes in multi‐signature address.') def construct_array(signatures_required, pubs, signatures_possible): """Create a multi‐signature address.""" test_array(signatures_required, pubs, signatures_possible) address = '_'.join([str(signatures_required)] + sorted(pubs) + [str(signatures_possible)]) return address def extract_array(address): """Extract data from multi‐signature address.""" assert is_multisig(address) array = address.split('_') signatures_required, pubs, signatures_possible = array[0], sorted(array[1:-1]), array[-1] test_array(signatures_required, pubs, signatures_possible) return int(signatures_required), pubs, int(signatures_possible) def pubkeyhash_array(address): """Return PubKeyHashes from an address.""" signatures_required, pubs, signatures_possible = extract_array(address) if not all([is_pubkeyhash(pub) for pub in pubs]): raise MultiSigAddressError('Invalid PubKeyHashes. Multi‐signature address must use PubKeyHashes, not public keys.') pubkeyhashes = pubs return pubkeyhashes def hash160(x): x = hashlib.sha256(x).digest() m = hashlib.new('ripemd160') m.update(x) return m.digest() def pubkey_to_pubkeyhash(pubkey): """Convert public key to PubKeyHash.""" pubkeyhash = hash160(pubkey) pubkey = base58_check_encode(binascii.hexlify(pubkeyhash).decode('utf-8'), config.ADDRESSVERSION) return pubkey def pubkey_to_p2whash(pubkey): """Convert public key to PayToWitness.""" pubkeyhash = hash160(pubkey) pubkey = CBech32Data.from_bytes(0, pubkeyhash) return str(pubkey) def bech32_to_scripthash(address): bech32 = CBech32Data(address) return bytes(bech32) def get_asm(scriptpubkey): # TODO: When is an exception thrown here? Can this `try` block be tighter? Can it be replaced by a conditional? try: asm = [] # TODO: This should be `for element in scriptpubkey`. for op in scriptpubkey: if type(op) == bitcoinlib.core.script.CScriptOp: # TODO: `op = element` asm.append(str(op)) else: # TODO: `data = element` (?) 
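`construct_array()` and `extract_array()` above round-trip a multi-signature address through an underscore-joined string of the form `required_pub1_..._possible`. A minimal standalone sketch of that format (the validation done by `test_array()` is omitted):

```python
# Minimal re-implementation for illustration only; the real functions also
# run test_array() for validation.
def construct(signatures_required, pubs, signatures_possible):
    return '_'.join([str(signatures_required)] + sorted(pubs) + [str(signatures_possible)])

def extract(address):
    array = address.split('_')
    return int(array[0]), sorted(array[1:-1]), int(array[-1])

address = construct(1, ['mnYYYY', 'mnXXXX'], 2)
assert address == '1_mnXXXX_mnYYYY_2'
assert extract(address) == (1, ['mnXXXX', 'mnYYYY'], 2)
```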
asm.append(op) except bitcoinlib.core.script.CScriptTruncatedPushDataError: raise exceptions.PushDataDecodeError('invalid pushdata due to truncation') if not asm: raise exceptions.DecodeError('empty output') return asm def get_checksig(asm): if len(asm) == 5 and asm[0] == 'OP_DUP' and asm[1] == 'OP_HASH160' and asm[3] == 'OP_EQUALVERIFY' and asm[4] == 'OP_CHECKSIG': pubkeyhash = asm[2] if type(pubkeyhash) == bytes: return pubkeyhash raise exceptions.DecodeError('invalid OP_CHECKSIG') def get_checkmultisig(asm): # N‐of‐2 if len(asm) == 5 and asm[3] == 2 and asm[4] == 'OP_CHECKMULTISIG': pubkeys, signatures_required = asm[1:3], asm[0] if all([type(pubkey) == bytes for pubkey in pubkeys]): return pubkeys, signatures_required # N‐of‐3 if len(asm) == 6 and asm[4] == 3 and asm[5] == 'OP_CHECKMULTISIG': pubkeys, signatures_required = asm[1:4], asm[0] if all([type(pubkey) == bytes for pubkey in pubkeys]): return pubkeys, signatures_required raise exceptions.DecodeError('invalid OP_CHECKMULTISIG') def scriptpubkey_to_address(scriptpubkey): asm = get_asm(scriptpubkey) if asm[-1] == 'OP_CHECKSIG': try: checksig = get_checksig(asm) except exceptions.DecodeError: # coinbase return None return base58_check_encode(binascii.hexlify(checksig).decode('utf-8'), config.ADDRESSVERSION) elif asm[-1] == 'OP_CHECKMULTISIG': pubkeys, signatures_required = get_checkmultisig(asm) pubkeyhashes = [pubkey_to_pubkeyhash(pubkey) for pubkey in pubkeys] return construct_array(signatures_required, pubkeyhashes, len(pubkeyhashes)) elif len(asm) == 3 and asm[0] == 'OP_HASH160' and asm[2] == 'OP_EQUAL': return base58_check_encode(binascii.hexlify(asm[1]).decode('utf-8'), config.P2SH_ADDRESSVERSION) return None # TODO: Use `python-bitcointools` instead. (Get rid of `pycoin` dependency.) from pycoin.encoding import wif_to_tuple_of_secret_exponent_compressed, public_pair_to_sec, EncodingError from pycoin.ecdsa import generator_secp256k1, public_pair_for_secret_exponent class AltcoinSupportError (Exception): pass def private_key_to_public_key(private_key_wif): """Convert private key to public key.""" if config.TESTNET: allowable_wif_prefixes = [config.PRIVATEKEY_VERSION_TESTNET] elif config.REGTEST: allowable_wif_prefixes = [config.PRIVATEKEY_VERSION_REGTEST] else: allowable_wif_prefixes = [config.PRIVATEKEY_VERSION_MAINNET] try: secret_exponent, compressed = wif_to_tuple_of_secret_exponent_compressed( private_key_wif, allowable_wif_prefixes=allowable_wif_prefixes) except EncodingError: raise AltcoinSupportError('pycoin: unsupported WIF prefix') public_pair = public_pair_for_secret_exponent(generator_secp256k1, secret_exponent) public_key = public_pair_to_sec(public_pair, compressed=compressed) public_key_hex = binascii.hexlify(public_key).decode('utf-8') return public_key_hex def is_pubkeyhash(monosig_address): """Check if PubKeyHash is valid P2PKH address. 
""" assert not is_multisig(monosig_address) try: base58_check_decode(monosig_address, config.ADDRESSVERSION) return True except (Base58Error, VersionByteError): return False def make_pubkeyhash(address): """Create a new PubKeyHash.""" if is_multisig(address): signatures_required, pubs, signatures_possible = extract_array(address) pubkeyhashes = [] for pub in pubs: if is_pubkeyhash(pub): pubkeyhash = pub else: pubkeyhash = pubkey_to_pubkeyhash(binascii.unhexlify(bytes(pub, 'utf-8'))) pubkeyhashes.append(pubkeyhash) pubkeyhash_address = construct_array(signatures_required, pubkeyhashes, signatures_possible) else: if util.enabled('segwit_support') and is_bech32(address): pubkeyhash_address = address # Some bech32 addresses are valid base58 data elif is_pubkeyhash(address): pubkeyhash_address = address elif is_p2sh(address): pubkeyhash_address = address else: pubkeyhash_address = pubkey_to_pubkeyhash(binascii.unhexlify(bytes(address, 'utf-8'))) return pubkeyhash_address def extract_pubkeys(pub): """Assume pubkey if not pubkeyhash. (Check validity later.)""" pubkeys = [] if is_multisig(pub): _, pubs, _ = extract_array(pub) for pub in pubs: if not is_pubkeyhash(pub): pubkeys.append(pub) elif is_p2sh(pub): pass elif util.enabled('segwit_support') and is_bech32(pub): pass else: if not is_pubkeyhash(pub): pubkeys.append(pub) return pubkeys # vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4 #### transaction.py """ Construct and serialize the Bitcoin transactions that are Counterparty transactions. This module contains no consensus‐critical code. """ import os import sys import binascii import json import hashlib import re import time import decimal import logging logger = logging.getLogger(__name__) import requests import bitcoin as bitcoinlib from bitcoin.core.script import CScript from bitcoin.core import x, CTransaction from bitcoin.core import b2lx import cachetools import math import io from counterpartylib.lib import blocks from counterpartylib.lib import config from counterpartylib.lib import exceptions from counterpartylib.lib import util from counterpartylib.lib import script from counterpartylib.lib import backend from counterpartylib.lib import arc4 from counterpartylib.lib.transaction_helper import serializer, p2sh_encoding # Constants OP_RETURN = b'\x6a' OP_PUSHDATA1 = b'\x4c' OP_DUP = b'\x76' OP_HASH160 = b'\xa9' OP_EQUALVERIFY = b'\x88' OP_CHECKSIG = b'\xac' OP_0 = b'\x00' OP_1 = b'\x51' OP_2 = b'\x52' OP_3 = b'\x53' OP_CHECKMULTISIG = b'\xae' OP_EQUAL = b'\x87' D = decimal.Decimal # UTXO_LOCKS is None or DictCache per address UTXO_LOCKS = None # set higher than the max number of UTXOs we should expect to # manage in an aging cache for any one source address, at any one period UTXO_LOCKS_PER_ADDRESS_MAXSIZE = 5000 # UTXO_P2SH_ENCODING_LOCKS is TTLCache for UTXOs that are used for chaining p2sh encoding # instead of a simple (txid, vout) key we use [(vin.prevout.hash, vin.prevout.n) for vin tx1.vin] UTXO_P2SH_ENCODING_LOCKS = None # we cache the make_outkey_vin to avoid having to fetch raw txs too often UTXO_P2SH_ENCODING_LOCKS_CACHE = None def initialise(): global UTXO_LOCKS, UTXO_P2SH_ENCODING_LOCKS, UTXO_P2SH_ENCODING_LOCKS_CACHE if config.UTXO_LOCKS_MAX_ADDRESSES > 0: # initialize if configured UTXO_LOCKS = util.DictCache(size=config.UTXO_LOCKS_MAX_ADDRESSES) UTXO_P2SH_ENCODING_LOCKS = cachetools.TTLCache(10000, 180) UTXO_P2SH_ENCODING_LOCKS_CACHE = cachetools.TTLCache(1000, 600) def print_coin(coin): return 'amount: {:.8f}; txid: {}; vout: {}; confirmations: 
{}'.format(coin['amount'], coin['txid'], coin['vout'], coin.get('confirmations', '?')) # simplify and make deterministic def chunks(l, n): """ Yield successive n‐sized chunks from l. """ for i in range(0, len(l), n): yield l[i:i+n] def make_outkey(output): return '{}{}'.format(output['txid'], output['vout']) def make_outkey_vin_txid(txid, vout): global UTXO_P2SH_ENCODING_LOCKS_CACHE if (txid, vout) not in UTXO_P2SH_ENCODING_LOCKS_CACHE: txhex = backend.getrawtransaction(txid, verbose=False) UTXO_P2SH_ENCODING_LOCKS_CACHE[(txid, vout)] = make_outkey_vin(txhex, vout) return UTXO_P2SH_ENCODING_LOCKS_CACHE[(txid, vout)] def make_outkey_vin(txhex, vout): txbin = binascii.unhexlify(txhex) if isinstance(txhex, str) else txhex assert isinstance(vout, int) tx = bitcoinlib.core.CTransaction.deserialize(txbin) outkey = [(vin.prevout.hash, vin.prevout.n) for vin in tx.vin] outkey = hashlib.sha256(("%s%s" % (outkey, vout)).encode('ascii')).digest() return outkey def get_dust_return_pubkey(source, provided_pubkeys, encoding): """Return the pubkey to which dust from data outputs will be sent. This pubkey is used in multi-sig data outputs (as the only real pubkey) to make those the outputs spendable. It is derived from the source address, so that the dust is spendable by the creator of the transaction. """ # Get hex dust return pubkey. if script.is_multisig(source): a, self_pubkeys, b = script.extract_array(backend.multisig_pubkeyhashes_to_pubkeys(source, provided_pubkeys)) dust_return_pubkey_hex = self_pubkeys[0] else: dust_return_pubkey_hex = backend.pubkeyhash_to_pubkey(source, provided_pubkeys) # Convert hex public key into the (binary) dust return pubkey. try: dust_return_pubkey = binascii.unhexlify(dust_return_pubkey_hex) except binascii.Error: raise script.InputError('Invalid private key.') return dust_return_pubkey def construct_coin_selection(encoding, data_array, source, allow_unconfirmed_inputs, unspent_tx_hash, custom_inputs, fee_per_kb, estimate_fee_per_kb, estimate_fee_per_kb_nblocks, exact_fee, size_for_fee, fee_provided, destination_btc_out, data_btc_out, regular_dust_size, disable_utxo_locks): global UTXO_LOCKS, UTXO_P2SH_ENCODING_LOCKS # Array of UTXOs, as retrieved by listunspent function from bitcoind if custom_inputs: use_inputs = unspent = custom_inputs else: if unspent_tx_hash is not None: unspent = backend.get_unspent_txouts(source, unconfirmed=allow_unconfirmed_inputs, unspent_tx_hash=unspent_tx_hash) else: unspent = backend.get_unspent_txouts(source, unconfirmed=allow_unconfirmed_inputs) filter_unspents_utxo_locks = [] if UTXO_LOCKS is not None and source in UTXO_LOCKS: filter_unspents_utxo_locks = UTXO_LOCKS[source].keys() filter_unspents_p2sh_locks = UTXO_P2SH_ENCODING_LOCKS.keys() # filter out any locked UTXOs to prevent creating transactions that spend the same UTXO when they're created at the same time filtered_unspent = [] for output in unspent: if make_outkey(output) not in filter_unspents_utxo_locks and make_outkey_vin_txid(output['txid'], output['vout']) not in filter_unspents_p2sh_locks: filtered_unspent.append(output) unspent = filtered_unspent unspent = backend.sort_unspent_txouts(unspent) logger.debug('Sorted candidate UTXOs: {}'.format([print_coin(coin) for coin in unspent])) use_inputs = unspent # use backend estimated fee_per_kb if estimate_fee_per_kb: estimated_fee_per_kb = backend.fee_per_kb(estimate_fee_per_kb_nblocks, config.ESTIMATE_FEE_MODE) if estimated_fee_per_kb is not None: fee_per_kb = max(estimated_fee_per_kb, fee_per_kb) # never drop below the default 
fee_per_kb logger.debug('Fee/KB {:.8f}'.format(fee_per_kb / config.UNIT)) inputs = [] btc_in = 0 change_quantity = 0 sufficient_funds = False final_fee = fee_per_kb desired_input_count = 1 if encoding == 'multisig' and data_array and util.enabled('bytespersigop'): desired_input_count = len(data_array) * 2 # pop inputs until we can pay for the fee for coin in use_inputs: logger.debug('New input: {}'.format(print_coin(coin))) inputs.append(coin) btc_in += round(coin['amount'] * config.UNIT) # If exact fee is specified, use that. Otherwise, calculate size of tx # and base fee on that (plus provide a minimum fee for selling BTC). size = 181 * len(inputs) + size_for_fee + 10 if exact_fee: final_fee = exact_fee else: necessary_fee = int(size / 1000 * fee_per_kb) final_fee = max(fee_provided, necessary_fee) logger.getChild('p2shdebug').debug('final_fee inputs: %d size: %d final_fee %s' % (len(inputs), size, final_fee)) # Check if good. btc_out = destination_btc_out + data_btc_out change_quantity = btc_in - (btc_out + final_fee) logger.debug('Size: {} Fee: {:.8f} Change quantity: {:.8f} BTC'.format(size, final_fee / config.UNIT, change_quantity / config.UNIT)) # If change is necessary, must not be a dust output. if change_quantity == 0 or change_quantity >= regular_dust_size: sufficient_funds = True if len(inputs) >= desired_input_count: break if not sufficient_funds: # Approximate needed change, fee by with most recently calculated # quantities. btc_out = destination_btc_out + data_btc_out total_btc_out = btc_out + max(change_quantity, 0) + final_fee raise exceptions.BalanceError('Insufficient {} at address {}. (Need approximately {} {}.) To spend unconfirmed coins, use the flag `--unconfirmed`. (Unconfirmed coins cannot be spent from multi‐sig addresses.)'.format(config.BTC, source, total_btc_out / config.UNIT, config.BTC)) # Lock the source's inputs (UTXOs) chosen for this transaction if UTXO_LOCKS is not None and not disable_utxo_locks: if source not in UTXO_LOCKS: UTXO_LOCKS[source] = cachetools.TTLCache( UTXO_LOCKS_PER_ADDRESS_MAXSIZE, config.UTXO_LOCKS_MAX_AGE) for input in inputs: UTXO_LOCKS[source][make_outkey(input)] = input logger.debug("UTXO locks: Potentials ({}): {}, Used: {}, locked UTXOs: {}".format( len(unspent), [make_outkey(coin) for coin in unspent], [make_outkey(input) for input in inputs], list(UTXO_LOCKS[source].keys()))) # ensure inputs have scriptPubKey # this is not provided by indexd inputs = backend.ensure_script_pub_key_for_inputs(inputs) return inputs, change_quantity, btc_in, final_fee def select_any_coin_from_source(source, allow_unconfirmed_inputs=True, disable_utxo_locks=False): ''' Get the first (biggest) input from the source address ''' global UTXO_LOCKS # Array of UTXOs, as retrieved by listunspent function from bitcoind unspent = backend.get_unspent_txouts(source, unconfirmed=allow_unconfirmed_inputs) filter_unspents_utxo_locks = [] if UTXO_LOCKS is not None and source in UTXO_LOCKS: filter_unspents_utxo_locks = UTXO_LOCKS[source].keys() # filter out any locked UTXOs to prevent creating transactions that spend the same UTXO when they're created at the same time filtered_unspent = [] for output in unspent: if make_outkey(output) not in filter_unspents_utxo_locks: filtered_unspent.append(output) unspent = filtered_unspent # sort unspent = backend.sort_unspent_txouts(unspent) # use the first input input = unspent[0] if input is None: return None # Lock the source's inputs (UTXOs) chosen for this transaction if UTXO_LOCKS is not None and not disable_utxo_locks: 
if source not in UTXO_LOCKS: UTXO_LOCKS[source] = cachetools.TTLCache( UTXO_LOCKS_PER_ADDRESS_MAXSIZE, config.UTXO_LOCKS_MAX_AGE) UTXO_LOCKS[source][make_outkey(input)] = input return input def return_result(tx_hexes, old_style_api): tx_hexes = list(filter(None, tx_hexes)) # filter out None if old_style_api: if len(tx_hexes) != 1: raise Exception("Can't do 2 TXs with old_style_api") return tx_hexes[0] else: if len(tx_hexes) == 1: return tx_hexes[0] else: return tx_hexes def construct (db, tx_info, encoding='auto', fee_per_kb=config.DEFAULT_FEE_PER_KB, estimate_fee_per_kb=None, estimate_fee_per_kb_conf_target=config.ESTIMATE_FEE_CONF_TARGET, estimate_fee_per_kb_mode=config.ESTIMATE_FEE_MODE, estimate_fee_per_kb_nblocks=config.ESTIMATE_FEE_CONF_TARGET, regular_dust_size=config.DEFAULT_REGULAR_DUST_SIZE, multisig_dust_size=config.DEFAULT_MULTISIG_DUST_SIZE, op_return_value=config.DEFAULT_OP_RETURN_VALUE, exact_fee=None, fee_provided=0, provided_pubkeys=None, dust_return_pubkey=None, allow_unconfirmed_inputs=False, unspent_tx_hash=None, custom_inputs=None, disable_utxo_locks=False, extended_tx_info=False, old_style_api=None, segwit=False, p2sh_source_multisig_pubkeys=None, p2sh_source_multisig_pubkeys_required=None, p2sh_pretx_txid=None,): if estimate_fee_per_kb is None: estimate_fee_per_kb = config.ESTIMATE_FEE_PER_KB global UTXO_LOCKS, UTXO_P2SH_ENCODING_LOCKS # lazy assign from config, because when set as default it's evaluated before it's configured if old_style_api is None: old_style_api = config.OLD_STYLE_API (source, destination_outputs, data) = tx_info if dust_return_pubkey: dust_return_pubkey = binascii.unhexlify(dust_return_pubkey) if p2sh_source_multisig_pubkeys: p2sh_source_multisig_pubkeys = [binascii.unhexlify(p) for p in p2sh_source_multisig_pubkeys] # Source. # If public key is necessary for construction of (unsigned) # transaction, use the public key provided, or find it from the # blockchain. if source: script.validate(source) source_is_p2sh = script.is_p2sh(source) # Normalize source if script.is_multisig(source): source_address = backend.multisig_pubkeyhashes_to_pubkeys(source, provided_pubkeys) else: source_address = source # Sanity checks. if exact_fee and not isinstance(exact_fee, int): raise exceptions.TransactionError('Exact fees must be in satoshis.') if not isinstance(fee_provided, int): raise exceptions.TransactionError('Fee provided must be in satoshis.') '''Determine encoding method''' if data: desired_encoding = encoding # Data encoding methods (choose and validate). if desired_encoding == 'auto': if len(data) + len(config.PREFIX) <= config.OP_RETURN_MAX_SIZE: encoding = 'opreturn' else: encoding = 'p2sh' if not old_style_api and util.enabled('p2sh_encoding') else 'multisig' # p2sh is not possible with old_style_api elif desired_encoding == 'p2sh' and not util.enabled('p2sh_encoding'): raise exceptions.TransactionError('P2SH encoding not enabled yet') elif encoding not in ('pubkeyhash', 'multisig', 'opreturn', 'p2sh'): raise exceptions.TransactionError('Unknown encoding‐scheme.') else: # no data encoding = None '''Destinations''' # Destination outputs. # Replace multi‐sig addresses with multi‐sig pubkeys. Check that the # destination output isn’t a dust output. Set null values to dust size. destination_outputs_new = [] if encoding != 'p2sh': for (address, value) in destination_outputs: # Value. 
if script.is_multisig(address): dust_size = multisig_dust_size else: dust_size = regular_dust_size if value == None: value = dust_size elif value < dust_size: raise exceptions.TransactionError('Destination output is dust.') # Address. script.validate(address) if script.is_multisig(address): destination_outputs_new.append((backend.multisig_pubkeyhashes_to_pubkeys(address, provided_pubkeys), value)) else: destination_outputs_new.append((address, value)) destination_outputs = destination_outputs_new destination_btc_out = sum([value for address, value in destination_outputs]) '''Data''' if data: # @TODO: p2sh encoding require signable dust key if encoding == 'multisig': # dust_return_pubkey should be set or explicitly set to False to use the default configured for the node # the default for the node is optional so could fail if (source_is_p2sh and dust_return_pubkey is None) or (dust_return_pubkey is False and config.P2SH_DUST_RETURN_PUBKEY is None): raise exceptions.TransactionError("Can't use multisig encoding when source is P2SH and no dust_return_pubkey is provided.") elif dust_return_pubkey is False: dust_return_pubkey = binascii.unhexlify(config.P2SH_DUST_RETURN_PUBKEY) if not dust_return_pubkey: if encoding == 'multisig' or encoding == 'p2sh' and not source_is_p2sh: dust_return_pubkey = get_dust_return_pubkey(source, provided_pubkeys, encoding) else: dust_return_pubkey = None # Divide data into chunks. if encoding == 'pubkeyhash': # Prefix is also a suffix here. chunk_size = 20 - 1 - 8 elif encoding == 'multisig': # Two pubkeys, minus length byte, minus prefix, minus two nonces, # minus two sign bytes. chunk_size = (33 * 2) - 1 - 8 - 2 - 2 elif encoding == 'p2sh': pubkeylength = -1 if dust_return_pubkey is not None: pubkeylength = len(dust_return_pubkey) chunk_size = p2sh_encoding.maximum_data_chunk_size(pubkeylength) elif encoding == 'opreturn': chunk_size = config.OP_RETURN_MAX_SIZE if len(data) + len(config.PREFIX) > chunk_size: raise exceptions.TransactionError('One `OP_RETURN` output per transaction.') data_array = list(chunks(data, chunk_size)) # Data outputs. if encoding == 'multisig': data_value = multisig_dust_size elif encoding == 'p2sh': data_value = 0 # this will be calculated later elif encoding == 'opreturn': data_value = op_return_value else: # Pay‐to‐PubKeyHash, e.g. data_value = regular_dust_size data_output = (data_array, data_value) else: data_value = 0 data_array = [] data_output = None dust_return_pubkey = None data_btc_out = data_value * len(data_array) logger.getChild('p2shdebug').debug('data_btc_out=%s (data_value=%d len(data_array)=%d)' % (data_btc_out, data_value, len(data_array))) '''Inputs''' btc_in = 0 final_fee = 0 # Calculate collective size of outputs, for fee calculation. p2pkhsize = 25 + 9 if encoding == 'multisig': data_output_size = 81 # 71 for the data elif encoding == 'opreturn': # prefix + data + 10 bytes script overhead data_output_size = len(config.PREFIX) + 10 if data is not None: data_output_size = data_output_size + len(data) else: data_output_size = p2pkhsize # Pay‐to‐PubKeyHash (25 for the data?) 
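The per-output data capacity computed above drives how `construct()` splits `data` with `chunks()`. A small sketch with the same arithmetic; the 8-byte length of `config.PREFIX` is an assumption here, since that constant is not shown in this file:

```python
# Sketch of the data chunking in construct(); PREFIX_LEN = 8 is assumed.
def chunks(l, n):
    for i in range(0, len(l), n):
        yield l[i:i + n]

PREFIX_LEN = 8
chunk_size_pubkeyhash = 20 - 1 - PREFIX_LEN              # 11 data bytes per output
chunk_size_multisig = (33 * 2) - 1 - PREFIX_LEN - 2 - 2  # 53 data bytes per output

data = bytes(120)
assert [len(c) for c in chunks(data, chunk_size_multisig)] == [53, 53, 14]
assert [len(c) for c in chunks(data, chunk_size_pubkeyhash)][:2] == [11, 11]
```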
outputs_size = (p2pkhsize * len(destination_outputs)) + (len(data_array) * data_output_size) if encoding == 'p2sh': # calculate all the p2sh outputs size_for_fee, datatx_necessary_fee, data_value, data_btc_out = p2sh_encoding.calculate_outputs(destination_outputs, data_array, fee_per_kb, exact_fee) # replace the data value data_output = (data_array, data_value) else: sum_data_output_size = len(data_array) * data_output_size size_for_fee = ((25 + 9) * len(destination_outputs)) + sum_data_output_size if not (encoding == 'p2sh' and p2sh_pretx_txid): inputs, change_quantity, n_btc_in, n_final_fee = construct_coin_selection( encoding, data_array, source, allow_unconfirmed_inputs, unspent_tx_hash, custom_inputs, fee_per_kb, estimate_fee_per_kb, estimate_fee_per_kb_nblocks, exact_fee, size_for_fee, fee_provided, destination_btc_out, data_btc_out, regular_dust_size, disable_utxo_locks ) btc_in = n_btc_in final_fee = n_final_fee else: # when encoding is P2SH and the pretx txid is passed we can skip coinselection inputs, change_quantity = None, None '''Finish''' if change_quantity: change_output = (source_address, change_quantity) else: change_output = None unsigned_pretx_hex = None unsigned_tx_hex = None pretx_txid = None if encoding == 'p2sh': assert not (segwit and p2sh_pretx_txid) # shouldn't do old style with segwit enabled if p2sh_pretx_txid: pretx_txid = p2sh_pretx_txid if isinstance(p2sh_pretx_txid, bytes) else binascii.unhexlify(p2sh_pretx_txid) unsigned_pretx = None else: destination_value_sum = sum([value for (destination, value) in destination_outputs]) source_value = destination_value_sum if change_output: # add the difference between source and destination to the change change_value = change_output[1] + (destination_value_sum - source_value) change_output = (change_output[0], change_value) unsigned_pretx = serializer.serialise_p2sh_pretx(inputs, source=source_address, source_value=source_value, data_output=data_output, change_output=change_output, pubkey=dust_return_pubkey, multisig_pubkeys=p2sh_source_multisig_pubkeys, multisig_pubkeys_required=p2sh_source_multisig_pubkeys_required) unsigned_pretx_hex = binascii.hexlify(unsigned_pretx).decode('utf-8') # with segwit we already know the txid and can return both if segwit: #pretx_txid = hashlib.sha256(unsigned_pretx).digest() # this should be segwit txid ptx = CTransaction.stream_deserialize(io.BytesIO(unsigned_pretx)) # could be a non-segwit tx anyways txid_ba = bytearray(ptx.GetTxid()) txid_ba.reverse() pretx_txid = bytes(txid_ba) # gonna leave the malleability problem to upstream logger.getChild('p2shdebug').debug('pretx_txid %s' % pretx_txid) print('pretx txid:', binascii.hexlify(pretx_txid)) if unsigned_pretx: # we set a long lock on this, don't want other TXs to spend from it UTXO_P2SH_ENCODING_LOCKS[make_outkey_vin(unsigned_pretx, 0)] = True # only generate the data TX if we have the pretx txId if pretx_txid: source_input = None if script.is_p2sh(source): source_input = select_any_coin_from_source(source) if not source_input: raise exceptions.TransactionError('Unable to select source input for p2sh source address') unsigned_datatx = serializer.serialise_p2sh_datatx(pretx_txid, source=source_address, source_input=source_input, destination_outputs=destination_outputs, data_output=data_output, pubkey=dust_return_pubkey, multisig_pubkeys=p2sh_source_multisig_pubkeys, multisig_pubkeys_required=p2sh_source_multisig_pubkeys_required) unsigned_datatx_hex = binascii.hexlify(unsigned_datatx).decode('utf-8') # let the rest of the code work 
it's magic on the data tx unsigned_tx_hex = unsigned_datatx_hex else: # we're just gonna return the pretx, it doesn't require any of the further checks logger.warn('old_style_api = %s' % old_style_api) return return_result([unsigned_pretx_hex], old_style_api=old_style_api) else: # Serialise inputs and outputs. unsigned_tx = serializer.serialise(encoding, inputs, destination_outputs, data_output, change_output, dust_return_pubkey=dust_return_pubkey) unsigned_tx_hex = binascii.hexlify(unsigned_tx).decode('utf-8') '''Sanity Check''' # Desired transaction info. (desired_source, desired_destination_outputs, desired_data) = tx_info desired_source = script.make_canonical(desired_source) desired_destination = script.make_canonical(desired_destination_outputs[0][0]) if desired_destination_outputs else '' # NOTE: Include change in destinations for BTC transactions. # if change_output and not desired_data and desired_destination != config.UNSPENDABLE: # if desired_destination == '': # desired_destination = desired_source # else: # desired_destination += '-{}'.format(desired_source) # NOTE if desired_data == None: desired_data = b'' # Parsed transaction info. try: if pretx_txid and unsigned_pretx: backend.cache_pretx(pretx_txid, unsigned_pretx) parsed_source, parsed_destination, x, y, parsed_data, extra = blocks._get_tx_info(unsigned_tx_hex, p2sh_is_segwit=script.is_bech32(desired_source)) if encoding == 'p2sh': # make_canonical can't determine the address, so we blindly change the desired to the parsed desired_source = parsed_source if pretx_txid and unsigned_pretx: backend.clear_pretx(pretx_txid) except exceptions.BTCOnlyError: # Skip BTC‐only transactions. if extended_tx_info: return { 'btc_in': btc_in, 'btc_out': destination_btc_out + data_btc_out, 'btc_change': change_quantity, 'btc_fee': final_fee, 'tx_hex': unsigned_tx_hex, } logger.getChild('p2shdebug').debug('BTC-ONLY') return return_result([unsigned_pretx_hex, unsigned_tx_hex], old_style_api=old_style_api) desired_source = script.make_canonical(desired_source) # Check desired info against parsed info. 
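As a rough illustration of how a `size_for_fee` figure like the one computed above maps to a fee at a given sat/kB rate, here is the plain size-times-rate arithmetic. This is a hedged sketch, not the library's `construct_coin_selection()` logic, and the function name is invented for the example.

```python
# Illustrative only: bytes counted for fee purposes times the fee rate.
def fee_for_size(size_for_fee_bytes, fee_per_kb):
    return int(size_for_fee_bytes * fee_per_kb / 1000)  # satoshis

# Two P2PKH destinations (25 + 9 bytes each) plus one 81-byte multisig data
# output, at 10_000 sat/kB:
size_for_fee = (25 + 9) * 2 + 81      # 149 bytes counted for fee purposes
fee_for_size(size_for_fee, 10_000)    # -> 1490 satoshis
```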
desired = (desired_source, desired_destination, desired_data) parsed = (parsed_source, parsed_destination, parsed_data) if desired != parsed: # Unlock (revert) UTXO locks if UTXO_LOCKS is not None and inputs: for input in inputs: UTXO_LOCKS[source].pop(make_outkey(input), None) raise exceptions.TransactionError('Constructed transaction does not parse correctly: {} ≠ {}'.format(desired, parsed)) if extended_tx_info: return { 'btc_in': btc_in, 'btc_out': destination_btc_out + data_btc_out, 'btc_change': change_quantity, 'btc_fee': final_fee, 'tx_hex': unsigned_tx_hex, } return return_result([unsigned_pretx_hex, unsigned_tx_hex], old_style_api=old_style_api) def normalize_custom_inputs(raw_custom_inputs): custom_inputs = [] for custom_input in raw_custom_inputs: if 'value' not in custom_input: custom_input['value'] = int(custom_input['amount'] * config.UNIT) custom_inputs.append(custom_input) return custom_inputs # vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4 #### util.py import time import decimal import sys import json import logging logger = logging.getLogger(__name__) import apsw import inspect import requests from datetime import datetime from dateutil.tz import tzlocal from operator import itemgetter import fractions import warnings import binascii import re import hashlib import sha3 import bitcoin as bitcoinlib import os import collections import threading import random import itertools from counterpartylib.lib import exceptions from counterpartylib.lib.exceptions import DecodeError from counterpartylib.lib import config D = decimal.Decimal B26_DIGITS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' # subasset contain only characters a-zA-Z0-9.-_@! SUBASSET_DIGITS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789.-_@!' SUBASSET_REVERSE = {'a':1,'b':2,'c':3,'d':4,'e':5,'f':6,'g':7,'h':8,'i':9,'j':10,'k':11,'l':12,'m':13,'n':14, 'o':15,'p':16,'q':17,'r':18,'s':19,'t':20,'u':21,'v':22,'w':23,'x':24,'y':25,'z':26, 'A':27,'B':28,'C':29,'D':30,'E':31,'F':32,'G':33,'H':34,'I':35,'J':36,'K':37,'L':38,'M':39, 'N':40,'O':41,'P':42,'Q':43,'R':44,'S':45,'T':46,'U':47,'V':48,'W':49,'X':50,'Y':51,'Z':52, '0':53,'1':54,'2':55,'3':56,'4':57,'5':58,'6':59,'7':60,'8':61,'9':62,'.':63,'-':64,'_':65,'@':66,'!':67} # Obsolete in Python 3.4, with enum module. BET_TYPE_NAME = {0: 'BullCFD', 1: 'BearCFD', 2: 'Equal', 3: 'NotEqual'} BET_TYPE_ID = {'BullCFD': 0, 'BearCFD': 1, 'Equal': 2, 'NotEqual': 3} json_dump = lambda x: json.dumps(x, sort_keys=True, indent=4) json_print = lambda x: print(json_dump(x)) BLOCK_LEDGER = [] CURRENT_BLOCK_INDEX = None CURR_DIR = os.path.dirname(os.path.realpath(__file__)) with open(CURR_DIR + '/../protocol_changes.json') as f: PROTOCOL_CHANGES = json.load(f) class RPCError (Exception): pass # TODO: Move to `util_test.py`. # TODO: This doesn’t timeout properly. (If server hangs, then unhangs, no result.) 
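A brief usage sketch for `normalize_custom_inputs()` from transaction.py above, assuming `config.UNIT` is 100,000,000 satoshis per BTC; the txid is a placeholder.

```python
# Custom inputs given with a BTC 'amount' get a satoshi 'value' added.
raw = [{'txid': '00' * 32, 'vout': 0, 'amount': 0.001}]
normalized = normalize_custom_inputs(raw)
# normalized[0]['value'] == 100000   (0.001 BTC expressed in satoshis)
```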
def api(method, params): """Poll API via JSON-RPC.""" headers = {'content-type': 'application/json'} payload = { "method": method, "params": params, "jsonrpc": "2.0", "id": 0, } response = requests.post(config.RPC, data=json.dumps(payload), headers=headers) if response == None: raise RPCError('Cannot communicate with {} server.'.format(config.XCP_NAME)) elif response.status_code != 200: if response.status_code == 500: raise RPCError('Malformed API call.') else: raise RPCError(str(response.status_code) + ' ' + response.reason) response_json = response.json() if 'error' not in response_json.keys() or response_json['error'] == None: try: return response_json['result'] except KeyError: raise RPCError(response_json) else: raise RPCError('{} ({})'.format(response_json['error']['message'], response_json['error']['code'])) def chunkify(l, n): n = max(1, n) return [l[i:i + n] for i in range(0, len(l), n)] def flat(z): return [x for x in z] def py34TupleAppend(first_elem, t): # Had to do it this way to support python 3.4, if we start # using the 3.5 runtime this can be replaced by: # (first_elem, *t) l = list(t) l.insert(0, first_elem) return tuple(l) def accumulate(l): it = itertools.groupby(l, itemgetter(0)) for key, subiter in it: yield key, sum(item[1] for item in subiter) def date_passed(date): """Check if the date has already passed.""" return date <= int(time.time()) def price (numerator, denominator): """Return price as Fraction or Decimal.""" if CURRENT_BLOCK_INDEX >= 294500 or config.TESTNET or config.REGTEST: # Protocol change. return fractions.Fraction(numerator, denominator) else: numerator = D(numerator) denominator = D(denominator) return D(numerator / denominator) def last_message(db): """Return latest message from the db.""" cursor = db.cursor() messages = list(cursor.execute('''SELECT * FROM messages WHERE message_index = (SELECT MAX(message_index) from messages)''')) if messages: assert len(messages) == 1 last_message = messages[0] else: raise exceptions.DatabaseError('No messages found.') cursor.close() return last_message def generate_asset_id(asset_name, block_index): """Create asset_id from asset_name.""" if asset_name == config.BTC: return 0 elif asset_name == config.XCP: return 1 if len(asset_name) < 4: raise exceptions.AssetNameError('too short') # Numeric asset names. if enabled('numeric_asset_names'): # Protocol change. if asset_name[0] == 'A': # Must be numeric. try: asset_id = int(asset_name[1:]) except ValueError: raise exceptions.AssetNameError('non‐numeric asset name starts with ‘A’') # Number must be in range. if not (26**12 + 1 <= asset_id <= 2**64 - 1): raise exceptions.AssetNameError('numeric asset name not in range') return asset_id elif len(asset_name) >= 13: raise exceptions.AssetNameError('long asset names must be numeric') if asset_name[0] == 'A': raise exceptions.AssetNameError('non‐numeric asset name starts with ‘A’') # Convert the Base 26 string to an integer. n = 0 for c in asset_name: n *= 26 if c not in B26_DIGITS: raise exceptions.AssetNameError('invalid character:', c) digit = B26_DIGITS.index(c) n += digit asset_id = n if asset_id < 26**3: raise exceptions.AssetNameError('too short') return asset_id def generate_asset_name (asset_id, block_index): """Create asset_name from asset_id.""" if asset_id == 0: return config.BTC elif asset_id == 1: return config.XCP if asset_id < 26**3: raise exceptions.AssetIDError('too low') if enabled('numeric_asset_names'): # Protocol change. 
if asset_id <= 2**64 - 1: if 26**12 + 1 <= asset_id: asset_name = 'A' + str(asset_id) return asset_name else: raise exceptions.AssetIDError('too high') # Divide that integer into Base 26 string. res = [] n = asset_id while n > 0: n, r = divmod (n, 26) res.append(B26_DIGITS[r]) asset_name = ''.join(res[::-1]) """ return asset_name + checksum.compute(asset_name) """ return asset_name def get_asset_id (db, asset_name, block_index): """Return asset_id from asset_name.""" if not enabled('hotfix_numeric_assets'): return generate_asset_id(asset_name, block_index) cursor = db.cursor() cursor.execute('''SELECT * FROM assets WHERE asset_name = ?''', (asset_name,)) assets = list(cursor) if len(assets) == 1: return int(assets[0]['asset_id']) else: raise exceptions.AssetError('No such asset: {}'.format(asset_name)) def get_asset_name (db, asset_id, block_index): """Return asset_name from asset_id.""" if not enabled('hotfix_numeric_assets'): return generate_asset_name(asset_id, block_index) cursor = db.cursor() cursor.execute('''SELECT * FROM assets WHERE asset_id = ?''', (str(asset_id),)) assets = list(cursor) if len(assets) == 1: return assets[0]['asset_name'] elif not assets: return 0 # Strange, I know… # If asset_name is an existing subasset (PARENT.child) then return the corresponding numeric asset name (A12345) # If asset_name is not an existing subasset, then return the unmodified asset_name def resolve_subasset_longname(db, asset_name): if enabled('subassets'): subasset_longname = None try: subasset_parent, subasset_longname = parse_subasset_from_asset_name(asset_name) except Exception as e: logger.warn("Invalid subasset {}".format(asset_name)) subasset_longname = None if subasset_longname is not None: cursor = db.cursor() cursor.execute('''SELECT asset_name FROM assets WHERE asset_longname = ?''', (subasset_longname,)) assets = list(cursor) cursor.close() if len(assets) == 1: return assets[0]['asset_name'] return asset_name # checks and validates subassets (PARENT.SUBASSET) # throws exceptions for assset or subasset names with invalid syntax # returns (None, None) if the asset is not a subasset name def parse_subasset_from_asset_name(asset): subasset_parent = None subasset_child = None subasset_longname = None chunks = asset.split('.', 1) if (len(chunks) == 2): subasset_parent = chunks[0] subasset_child = chunks[1] subasset_longname = asset # validate parent asset validate_subasset_parent_name(subasset_parent) # validate child asset validate_subasset_longname(subasset_longname, subasset_child) return (subasset_parent, subasset_longname) # throws exceptions for invalid subasset names def validate_subasset_longname(subasset_longname, subasset_child=None): if subasset_child is None: chunks = subasset_longname.split('.', 1) if (len(chunks) == 2): subasset_child = chunks[1] else: subasset_child = '' if len(subasset_child) < 1: raise exceptions.AssetNameError('subasset name too short') if len(subasset_longname) > 250: raise exceptions.AssetNameError('subasset name too long') # can't start with period, can't have consecutive periods, can't contain anything not in SUBASSET_DIGITS previous_digit = '.' for c in subasset_child: if c not in SUBASSET_DIGITS: raise exceptions.AssetNameError('subasset name contains invalid character:', c) if c == '.' 
and previous_digit == '.': raise exceptions.AssetNameError('subasset name contains consecutive periods') previous_digit = c if previous_digit == '.': raise exceptions.AssetNameError('subasset name ends with a period') return True # throws exceptions for invalid subasset names def validate_subasset_parent_name(asset_name): if asset_name == config.BTC: raise exceptions.AssetNameError('parent asset cannot be {}'.format(config.BTC)) if asset_name == config.XCP: raise exceptions.AssetNameError('parent asset cannot be {}'.format(config.XCP)) if len(asset_name) < 4: raise exceptions.AssetNameError('parent asset name too short') if len(asset_name) >= 13: raise exceptions.AssetNameError('parent asset name too long') if asset_name[0] == 'A': raise exceptions.AssetNameError('parent asset name starts with ‘A’') for c in asset_name: if c not in B26_DIGITS: raise exceptions.AssetNameError('parent asset name contains invalid character:', c) return True def compact_subasset_longname(string): """Compacts a subasset name string into an array of bytes to save space using a base68 encoding scheme. Assumes all characters provided belong to SUBASSET_DIGITS. """ name_int = 0 for i, c in enumerate(string[::-1]): name_int += (68 ** i) * SUBASSET_REVERSE[c] return name_int.to_bytes((name_int.bit_length() + 7) // 8, byteorder='big') def expand_subasset_longname(raw_bytes): """Expands an array of bytes into a subasset name string.""" integer = int.from_bytes(raw_bytes, byteorder='big') if integer == 0: return '' ret = '' while integer != 0: ret = SUBASSET_DIGITS[integer % 68 - 1] + ret integer //= 68 return ret def generate_random_asset (): return 'A' + str(random.randint(26**12 + 1, 2**64 - 1)) def parse_options_from_string(string): """Parse options integer from string, if exists.""" string_list = string.split(" ") if len(string_list) == 2: try: options = int(string_list.pop()) except: raise exceptions.OptionsError('options not an integer') return options else: return False def validate_address_options(options): """Ensure the options are all valid and in range.""" if (options > config.MAX_INT) or (options < 0): raise exceptions.OptionsError('options integer overflow') elif options > config.ADDRESS_OPTION_MAX_VALUE: raise exceptions.OptionsError('options out of range') elif not active_options(config.ADDRESS_OPTION_MAX_VALUE, options): raise exceptions.OptionsError('options not possible') def active_options(config, options): """Checks if options active in some given config.""" return config & options == options class DebitError (Exception): pass def debit (db, address, asset, quantity, action=None, event=None): """Debit given address by quantity of asset.""" block_index = CURRENT_BLOCK_INDEX if type(quantity) != int: raise DebitError('Quantity must be an integer.') if quantity < 0: raise DebitError('Negative quantity.') if quantity > config.MAX_INT: raise DebitError('Quantity can\'t be higher than MAX_INT.') if asset == config.BTC: raise DebitError('Cannot debit bitcoins.') debit_cursor = db.cursor() # Contracts can only hold XCP balances. if enabled('contracts_only_xcp_balances'): # Protocol change. if len(address) == 40: assert asset == config.XCP if asset == config.BTC: raise exceptions.BalanceError('Cannot debit bitcoins from a {} address!'.format(config.XCP_NAME)) debit_cursor.execute('''SELECT * FROM balances \ WHERE (address = ? 
AND asset = ?)''', (address, asset)) balances = debit_cursor.fetchall() if not len(balances) == 1: old_balance = 0 else: old_balance = balances[0]['quantity'] if old_balance < quantity: raise DebitError('Insufficient funds.') balance = round(old_balance - quantity) balance = min(balance, config.MAX_INT) assert balance >= 0 bindings = { 'quantity': balance, 'address': address, 'asset': asset } sql='update balances set quantity = :quantity where (address = :address and asset = :asset)' debit_cursor.execute(sql, bindings) # Record debit. bindings = { 'block_index': block_index, 'address': address, 'asset': asset, 'quantity': quantity, 'action': action, 'event': event } sql='insert into debits values(:block_index, :address, :asset, :quantity, :action, :event)' debit_cursor.execute(sql, bindings) debit_cursor.close() BLOCK_LEDGER.append('{}{}{}{}'.format(block_index, address, asset, quantity)) class CreditError (Exception): pass def credit (db, address, asset, quantity, action=None, event=None): """Credit given address by quantity of asset.""" block_index = CURRENT_BLOCK_INDEX if type(quantity) != int: raise CreditError('Quantity must be an integer.') if quantity < 0: raise CreditError('Negative quantity.') if quantity > config.MAX_INT: raise CreditError('Quantity can\'t be higher than MAX_INT.') if asset == config.BTC: raise CreditError('Cannot debit bitcoins.') credit_cursor = db.cursor() # Contracts can only hold XCP balances. if enabled('contracts_only_xcp_balances'): # Protocol change. if len(address) == 40: assert asset == config.XCP credit_cursor.execute('''SELECT * FROM balances \ WHERE (address = ? AND asset = ?)''', (address, asset)) balances = credit_cursor.fetchall() if len(balances) == 0: assert balances == [] #update balances table with new balance bindings = { 'address': address, 'asset': asset, 'quantity': quantity, } sql='insert into balances values(:address, :asset, :quantity)' credit_cursor.execute(sql, bindings) elif len(balances) > 1: assert False else: old_balance = balances[0]['quantity'] assert type(old_balance) == int balance = round(old_balance + quantity) balance = min(balance, config.MAX_INT) bindings = { 'quantity': balance, 'address': address, 'asset': asset } sql='update balances set quantity = :quantity where (address = :address and asset = :asset)' credit_cursor.execute(sql, bindings) # Record credit. bindings = { 'block_index': block_index, 'address': address, 'asset': asset, 'quantity': quantity, 'action': action, 'event': event } sql='insert into credits values(:block_index, :address, :asset, :quantity, :action, :event)' credit_cursor.execute(sql, bindings) credit_cursor.close() BLOCK_LEDGER.append('{}{}{}{}'.format(block_index, address, asset, quantity)) class QuantityError(Exception): pass def is_divisible(db, asset): """Check if the asset is divisible.""" if asset in (config.BTC, config.XCP): return True else: cursor = db.cursor() cursor.execute('''SELECT * FROM issuances \ WHERE (status = ? AND asset = ?) ORDER BY tx_index DESC''', ('valid', asset)) issuances = cursor.fetchall() if not issuances: raise exceptions.AssetError('No such asset: {}'.format(asset)) return issuances[0]['divisible'] def value_input(quantity, asset, divisible): if asset == 'leverage': return round(quantity) if asset in ('value', 'fraction', 'price', 'odds'): return float(quantity) # TODO: Float?! 
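Worked examples for the asset-name helpers defined above, assuming the `numeric_asset_names` and `subassets` protocol changes are active; the asset names and block height are placeholders.

```python
# Base-26 names map to integers with 'A' = 0 … 'Z' = 25:
# 'TEST' -> 19*26**3 + 4*26**2 + 18*26 + 19 == 337135
generate_asset_id('TEST', block_index=400000)        # -> 337135
generate_asset_name(337135, block_index=400000)      # -> 'TEST'

# Subasset parsing splits on the first '.' and validates both halves:
parse_subasset_from_asset_name('PIZZA.dominos')      # -> ('PIZZA', 'PIZZA.dominos')
parse_subasset_from_asset_name('PIZZA')              # -> (None, None): not a subasset

# The base-68 packing round-trips:
expand_subasset_longname(compact_subasset_longname('PIZZA.dominos'))  # -> 'PIZZA.dominos'
```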
if divisible: quantity = D(quantity) * config.UNIT if quantity == quantity.to_integral(): return int(quantity) else: raise QuantityError('Divisible assets have only eight decimal places of precision.') else: quantity = D(quantity) if quantity != round(quantity): raise QuantityError('Fractional quantities of indivisible assets.') return round(quantity) def value_in(db, quantity, asset, divisible=None): if asset not in ['leverage', 'value', 'fraction', 'price', 'odds'] and divisible == None: divisible = is_divisible(db, asset) return value_input(quantity, asset, divisible) def value_output(quantity, asset, divisible): def norm(num, places): """Round only if necessary.""" num = round(num, places) fmt = '{:.' + str(places) + 'f}' num = fmt.format(num) return num.rstrip('0')+'0' if num.rstrip('0')[-1] == '.' else num.rstrip('0') if asset == 'fraction': return str(norm(D(quantity) * D(100), 6)) + '%' if asset in ('leverage', 'value', 'price', 'odds'): return norm(quantity, 6) if divisible: quantity = D(quantity) / D(config.UNIT) if quantity == quantity.to_integral(): return str(quantity) + '.0' # For divisible assets, display the decimal point. else: return norm(quantity, 8) else: quantity = D(quantity) if quantity != round(quantity): raise QuantityError('Fractional quantities of indivisible assets.') return round(quantity) def value_out(db, quantity, asset, divisible=None): if asset not in ['leverage', 'value', 'fraction', 'price', 'odds'] and divisible == None: divisible = is_divisible(db, asset) return value_output(quantity, asset, divisible) ### SUPPLIES ### def holders(db, asset, exclude_empty_holders=False): """Return holders of the asset.""" holders = [] cursor = db.cursor() # Balances if exclude_empty_holders: cursor.execute('''SELECT * FROM balances \ WHERE asset = ? AND quantity > ?''', (asset, 0)) else: cursor.execute('''SELECT * FROM balances \ WHERE asset = ?''', (asset, )) for balance in list(cursor): holders.append({'address': balance['address'], 'address_quantity': balance['quantity'], 'escrow': None}) # Funds escrowed in orders. (Protocol change.) cursor.execute('''SELECT * FROM orders \ WHERE give_asset = ? AND status = ?''', (asset, 'open')) for order in list(cursor): holders.append({'address': order['source'], 'address_quantity': order['give_remaining'], 'escrow': order['tx_hash']}) # Funds escrowed in pending order matches. (Protocol change.) cursor.execute('''SELECT * FROM order_matches \ WHERE (forward_asset = ? AND status = ?)''', (asset, 'pending')) for order_match in list(cursor): holders.append({'address': order_match['tx0_address'], 'address_quantity': order_match['forward_quantity'], 'escrow': order_match['id']}) cursor.execute('''SELECT * FROM order_matches \ WHERE (backward_asset = ? AND status = ?)''', (asset, 'pending')) for order_match in list(cursor): holders.append({'address': order_match['tx1_address'], 'address_quantity': order_match['backward_quantity'], 'escrow': order_match['id']}) # Bets and RPS (and bet/rps matches) only escrow XCP. 
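Conversion sketches for `value_input()` and `value_output()` above, assuming `config.UNIT == 100_000_000`; 'RARE' is a placeholder indivisible asset.

```python
value_input('1.5', 'XCP', divisible=True)        # -> 150000000 (base units)
value_output(150000000, 'XCP', divisible=True)   # -> '1.5'
value_output(300000000, 'XCP', divisible=True)   # -> '3.0' (divisible assets keep the decimal point)
value_input('1.5', 'RARE', divisible=False)      # raises QuantityError: indivisible assets take whole units only
```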
if asset == config.XCP: cursor.execute('''SELECT * FROM bets \ WHERE status = ?''', ('open',)) for bet in list(cursor): holders.append({'address': bet['source'], 'address_quantity': bet['wager_remaining'], 'escrow': bet['tx_hash']}) cursor.execute('''SELECT * FROM bet_matches \ WHERE status = ?''', ('pending',)) for bet_match in list(cursor): holders.append({'address': bet_match['tx0_address'], 'address_quantity': bet_match['forward_quantity'], 'escrow': bet_match['id']}) holders.append({'address': bet_match['tx1_address'], 'address_quantity': bet_match['backward_quantity'], 'escrow': bet_match['id']}) cursor.execute('''SELECT * FROM rps \ WHERE status = ?''', ('open',)) for rps in list(cursor): holders.append({'address': rps['source'], 'address_quantity': rps['wager'], 'escrow': rps['tx_hash']}) cursor.execute('''SELECT * FROM rps_matches \ WHERE status IN (?, ?, ?)''', ('pending', 'pending and resolved', 'resolved and pending')) for rps_match in list(cursor): holders.append({'address': rps_match['tx0_address'], 'address_quantity': rps_match['wager'], 'escrow': rps_match['id']}) holders.append({'address': rps_match['tx1_address'], 'address_quantity': rps_match['wager'], 'escrow': rps_match['id']}) if enabled('dispensers_in_holders'): # Funds escrowed in dispensers. cursor.execute('''SELECT * FROM dispensers \ WHERE asset = ? AND status = ?''', (asset, 0)) for dispenser in list(cursor): holders.append({'address': dispenser['source'], 'address_quantity': dispenser['give_remaining'], 'escrow': None}) cursor.close() return holders def xcp_created (db): """Return number of XCP created thus far.""" cursor = db.cursor() cursor.execute('''SELECT SUM(earned) AS total FROM burns \ WHERE (status = ?)''', ('valid',)) total = list(cursor)[0]['total'] or 0 cursor.close() return total def xcp_destroyed (db): """Return number of XCP destroyed thus far.""" cursor = db.cursor() # Destructions cursor.execute('''SELECT SUM(quantity) AS total FROM destructions \ WHERE (status = ? AND asset = ?)''', ('valid', config.XCP)) destroyed_total = list(cursor)[0]['total'] or 0 # Subtract issuance fees. cursor.execute('''SELECT SUM(fee_paid) AS total FROM issuances\ WHERE status = ?''', ('valid',)) issuance_fee_total = list(cursor)[0]['total'] or 0 # Subtract dividend fees. cursor.execute('''SELECT SUM(fee_paid) AS total FROM dividends\ WHERE status = ?''', ('valid',)) dividend_fee_total = list(cursor)[0]['total'] or 0 # Subtract sweep fees. cursor.execute('''SELECT SUM(fee_paid) AS total FROM sweeps\ WHERE status = ?''', ('valid',)) sweeps_fee_total = list(cursor)[0]['total'] or 0 cursor.close() return destroyed_total + issuance_fee_total + dividend_fee_total + sweeps_fee_total def xcp_supply (db): """Return the XCP supply.""" return xcp_created(db) - xcp_destroyed(db) def creations (db): """Return creations.""" cursor = db.cursor() creations = {config.XCP: xcp_created(db)} cursor.execute('''SELECT asset, SUM(quantity) AS created FROM issuances \ WHERE status = ? GROUP BY asset''', ('valid',)) for issuance in cursor: asset = issuance['asset'] created = issuance['created'] creations[asset] = created cursor.close() return creations def destructions (db): """Return destructions.""" cursor = db.cursor() destructions = {config.XCP: xcp_destroyed(db)} cursor.execute('''SELECT asset, SUM(quantity) AS destroyed FROM destructions \ WHERE (status = ? AND asset != ?) 
GROUP BY asset''', ('valid', config.XCP)) for destruction in cursor: asset = destruction['asset'] destroyed = destruction['destroyed'] destructions[asset] = destroyed cursor.close() return destructions def asset_issued_total (db, asset): """Return asset total issued.""" cursor = db.cursor() cursor.execute('''SELECT SUM(quantity) AS total FROM issuances \ WHERE (status = ? AND asset = ?)''', ('valid', asset)) issued_total = list(cursor)[0]['total'] or 0 cursor.close() return issued_total def asset_destroyed_total (db, asset): """Return asset total destroyed.""" cursor = db.cursor() cursor.execute('''SELECT SUM(quantity) AS total FROM destructions \ WHERE (status = ? AND asset = ?)''', ('valid', asset)) destroyed_total = list(cursor)[0]['total'] or 0 cursor.close() return destroyed_total def asset_supply (db, asset): """Return asset supply.""" return asset_issued_total(db, asset) - asset_destroyed_total(db, asset) def supplies (db): """Return supplies.""" d1 = creations(db) d2 = destructions(db) return {key: d1[key] - d2.get(key, 0) for key in d1.keys()} def held (db): #TODO: Rename ? queries = [ "SELECT asset, SUM(quantity) AS total FROM balances GROUP BY asset", "SELECT give_asset AS asset, SUM(give_remaining) AS total FROM orders WHERE status = 'open' GROUP BY asset", "SELECT give_asset AS asset, SUM(give_remaining) AS total FROM orders WHERE status = 'filled' and give_asset = 'XCP' and get_asset = 'BTC' GROUP BY asset", "SELECT forward_asset AS asset, SUM(forward_quantity) AS total FROM order_matches WHERE status = 'pending' GROUP BY asset", "SELECT backward_asset AS asset, SUM(backward_quantity) AS total FROM order_matches WHERE status = 'pending' GROUP BY asset", "SELECT 'XCP' AS asset, SUM(wager_remaining) AS total FROM bets WHERE status = 'open'", "SELECT 'XCP' AS asset, SUM(forward_quantity) AS total FROM bet_matches WHERE status = 'pending'", "SELECT 'XCP' AS asset, SUM(backward_quantity) AS total FROM bet_matches WHERE status = 'pending'", "SELECT 'XCP' AS asset, SUM(wager) AS total FROM rps WHERE status = 'open'", "SELECT 'XCP' AS asset, SUM(wager * 2) AS total FROM rps_matches WHERE status IN ('pending', 'pending and resolved', 'resolved and pending')", "SELECT asset, SUM(give_remaining) AS total FROM dispensers WHERE status=0 OR status=1 GROUP BY asset", ] sql = "SELECT asset, SUM(total) AS total FROM (" + " UNION ALL ".join(queries) + ") GROUP BY asset;" cursor = db.cursor() cursor.execute(sql) held = {} for row in cursor: asset = row['asset'] total = row['total'] held[asset] = total return held ### SUPPLIES ### class GetURLError (Exception): pass def get_url(url, abort_on_error=False, is_json=True, fetch_timeout=5): """Fetch URL using requests.get.""" try: r = requests.get(url, timeout=fetch_timeout) except Exception as e: raise GetURLError("Got get_url request error: %s" % e) else: if r.status_code != 200 and abort_on_error: raise GetURLError("Bad status code returned: '%s'. result body: '%s'." % (r.status_code, r.text)) result = json.loads(r.text) if is_json else r.text return result def dhash(text): if not isinstance(text, bytes): text = bytes(str(text), 'utf-8') return hashlib.sha256(hashlib.sha256(text).digest()).digest() def dhash_string(text): return binascii.hexlify(dhash(text)).decode() def get_balance (db, address, asset): """Get balance of contract or address.""" cursor = db.cursor() balances = list(cursor.execute('''SELECT * FROM balances WHERE (address = ? 
AND asset = ?)''', (address, asset))) cursor.close() if not balances: return 0 else: return balances[0]['quantity'] # Why on Earth does `binascii.hexlify()` return bytes?! def hexlify(x): """Return the hexadecimal representation of the binary data. Decode from ASCII to UTF-8.""" return binascii.hexlify(x).decode('ascii') def unhexlify(hex_string): return binascii.unhexlify(bytes(hex_string, 'utf-8')) ### Protocol Changes ### def enabled(change_name, block_index=None): """Return True if protocol change is enabled.""" if config.REGTEST: return True # All changes are always enabled on REGTEST if config.TESTNET: index_name = 'testnet_block_index' else: index_name = 'block_index' enable_block_index = PROTOCOL_CHANGES[change_name][index_name] if not block_index: block_index = CURRENT_BLOCK_INDEX if block_index >= enable_block_index: return True else: return False def get_value_by_block_index(change_name, block_index=None): if not block_index: block_index = CURRENT_BLOCK_INDEX if config.REGTEST: max_block_index_testnet = -1 for key, value in PROTOCOL_CHANGES[change_name]["testnet"]: if int(key) > int(max_block_index): max_block_index = key return PROTOCOL_CHANGES[change_name]["testnet"][max_block_index]["value"] if config.TESTNET: index_name = 'testnet' else: index_name = 'mainnet' max_block_index = -1 for key in PROTOCOL_CHANGES[change_name][index_name]: if int(key) > int(max_block_index) and block_index >= int(key): max_block_index = key return PROTOCOL_CHANGES[change_name][index_name][max_block_index]["value"] def transfer(db, source, destination, asset, quantity, action, event): """Transfer quantity of asset from source to destination.""" debit(db, source, asset, quantity, action=action, event=event) credit(db, destination, asset, quantity, action=action, event=event) ID_SEPARATOR = '_' def make_id(hash_1, hash_2): return hash_1 + ID_SEPARATOR + hash_2 def parse_id(match_id): assert match_id[64] == ID_SEPARATOR return match_id[:64], match_id[65:] # UTF-8 encoding means that the indices are doubled. 
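A hedged sketch of the `protocol_changes.json` shape that `enabled()` and `get_value_by_block_index()` above appear to expect; the change names, heights, and values below are invented for illustration only.

```python
# Made-up entries matching the key lookups in enabled() and get_value_by_block_index():
EXAMPLE_PROTOCOL_CHANGES = {
    'some_feature': {
        'block_index': 400000,           # mainnet activation height
        'testnet_block_index': 300000,   # testnet activation height
    },
    'some_parameter': {
        'mainnet': {'0': {'value': 10}, '500000': {'value': 20}},
        'testnet': {'0': {'value': 10}},
    },
}
# With data like this, enabled('some_feature', block_index=400001) would be True on
# mainnet, and get_value_by_block_index('some_parameter', 499999) would resolve to 10.
```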
def sizeof(v): if isinstance(v, dict) or isinstance(v, DictCache): s = 0 for dk, dv in v.items(): s += sizeof(dk) s += sizeof(dv) return s else: return sys.getsizeof(v) class DictCache: """Threadsafe FIFO dict cache""" def __init__(self, size=100): if int(size) < 1 : raise AttributeError('size < 1 or not a number') self.size = size self.dict = collections.OrderedDict() self.lock = threading.Lock() def __getitem__(self,key): with self.lock: return self.dict[key] def __setitem__(self,key,value): with self.lock: while len(self.dict) >= self.size: self.dict.popitem(last=False) self.dict[key] = value def __delitem__(self,key): with self.lock: del self.dict[key] def __len__(self): with self.lock: return len(self.dict) def __contains__(self, key): with self.lock: return key in self.dict def refresh(self, key): with self.lock: self.dict.move_to_end(key, last=True) URL_USERNAMEPASS_REGEX = re.compile('.+://(.+)@') def clean_url_for_log(url): m = URL_USERNAMEPASS_REGEX.match(url) if m and m.group(1): url = url.replace(m.group(1), 'XXXXXXXX') return url # vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4 # ORACLES def satoshirate_to_fiat(satoshirate): return round(satoshirate/100.0,2) def get_oracle_last_price(db, oracle_address, block_index): cursor = db.cursor() cursor.execute('SELECT * FROM broadcasts WHERE source=:source AND status=:status AND block_index<:block_index ORDER by tx_index DESC LIMIT 1', { 'source': oracle_address, 'status': 'valid', 'block_index': block_index }) broadcasts = cursor.fetchall() cursor.close() if len(broadcasts) == 0: return None, None oracle_broadcast = broadcasts[0] oracle_label = oracle_broadcast["text"].split("-") if len(oracle_label) == 2: fiat_label = oracle_label[1] else: fiat_label = "" return oracle_broadcast['value'], oracle_broadcast['fee_fraction_int'], fiat_label, oracle_broadcast['block_index'] #### util_windows.py import sys import copy import logging import logging.handlers logger = logging.getLogger(__name__) import unicodedata import codecs from ctypes import WINFUNCTYPE, windll, POINTER, byref, c_int from ctypes.wintypes import BOOL, HANDLE, DWORD, LPWSTR, LPCWSTR, LPVOID class SanitizedRotatingFileHandler(logging.handlers.RotatingFileHandler): def emit(self, record): # If the message doesn't need to be rendered we take a shortcut. if record.levelno < self.level: return # Make sure the message is a string. message = record.msg #Sanitize and clean up the message message = unicodedata.normalize('NFKD', message).encode('ascii', 'ignore').decode() # Copy the original record so we don't break other handlers. record = copy.copy(record) record.msg = message # Use the built-in stream handler to handle output. logging.handlers.RotatingFileHandler.emit(self, record) def fix_win32_unicode(): """Thanks to http://stackoverflow.com/a/3259271 ! (converted to python3)""" if sys.platform != "win32": return original_stderr = sys.stderr # If any exception occurs in this code, we'll probably try to print it on stderr, # which makes for frustrating debugging if stderr is directed to our wrapper. # So be paranoid about catching errors and reporting them to original_stderr, # so that we can at least see them. def _complain(message): print(message if isinstance(message, str) else repr(message), file=original_stderr) # Work around . codecs.register(lambda name: codecs.lookup('utf-8') if name == 'cp65001' else None) # Make Unicode console output work independently of the current code page. # This also fixes . # Credit to Michael Kaplan # and TZOmegaTZIOY # . 
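Usage sketch for util.py's `DictCache` and `clean_url_for_log()` defined above; the credentials shown are placeholders.

```python
cache = DictCache(size=2)
cache['a'] = 1
cache['b'] = 2
cache['c'] = 3        # evicts 'a' (oldest entry, FIFO)
'a' in cache          # -> False
cache.refresh('b')    # marks 'b' as most recent, so 'c' is now next in line for eviction

clean_url_for_log('http://rpcuser:rpcpassword@localhost:8332/')
# -> 'http://XXXXXXXX@localhost:8332/'
```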
try: # # HANDLE WINAPI GetStdHandle(DWORD nStdHandle); # returns INVALID_HANDLE_VALUE, NULL, or a valid handle # # # DWORD WINAPI GetFileType(DWORD hFile); # # # BOOL WINAPI GetConsoleMode(HANDLE hConsole, LPDWORD lpMode); GetStdHandle = WINFUNCTYPE(HANDLE, DWORD)(("GetStdHandle", windll.kernel32)) STD_OUTPUT_HANDLE = DWORD(-11) STD_ERROR_HANDLE = DWORD(-12) GetFileType = WINFUNCTYPE(DWORD, DWORD)(("GetFileType", windll.kernel32)) FILE_TYPE_CHAR = 0x0002 FILE_TYPE_REMOTE = 0x8000 GetConsoleMode = WINFUNCTYPE(BOOL, HANDLE, POINTER(DWORD))(("GetConsoleMode", windll.kernel32)) INVALID_HANDLE_VALUE = DWORD(-1).value def not_a_console(handle): if handle == INVALID_HANDLE_VALUE or handle is None: return True return ((GetFileType(handle) & ~FILE_TYPE_REMOTE) != FILE_TYPE_CHAR or GetConsoleMode(handle, byref(DWORD())) == 0) old_stdout_fileno = None old_stderr_fileno = None if hasattr(sys.stdout, 'fileno'): old_stdout_fileno = sys.stdout.fileno() if hasattr(sys.stderr, 'fileno'): old_stderr_fileno = sys.stderr.fileno() STDOUT_FILENO = 1 STDERR_FILENO = 2 real_stdout = (old_stdout_fileno == STDOUT_FILENO) real_stderr = (old_stderr_fileno == STDERR_FILENO) if real_stdout: hStdout = GetStdHandle(STD_OUTPUT_HANDLE) if not_a_console(hStdout): real_stdout = False if real_stderr: hStderr = GetStdHandle(STD_ERROR_HANDLE) if not_a_console(hStderr): real_stderr = False if real_stdout or real_stderr: # BOOL WINAPI WriteConsoleW(HANDLE hOutput, LPWSTR lpBuffer, DWORD nChars, # LPDWORD lpCharsWritten, LPVOID lpReserved); WriteConsoleW = WINFUNCTYPE(BOOL, HANDLE, LPWSTR, DWORD, POINTER(DWORD), LPVOID)(("WriteConsoleW", windll.kernel32)) class UnicodeOutput: def __init__(self, hConsole, stream, fileno, name): self._hConsole = hConsole self._stream = stream self._fileno = fileno self.closed = False self.softspace = False self.mode = 'w' self.encoding = 'utf-8' self.name = name self.errors = '' self.flush() def isatty(self): return False def close(self): # don't really close the handle, that would only cause problems self.closed = True def fileno(self): return self._fileno def flush(self): if self._hConsole is None: try: self._stream.flush() except Exception as e: _complain("%s.flush: %r from %r" % (self.name, e, self._stream)) raise def write(self, text): try: if self._hConsole is None: if isinstance(text, str): text = text.encode('utf-8') self._stream.write(text) else: if not isinstance(text, str): text = str(text).decode('utf-8') remaining = len(text) while remaining: n = DWORD(0) # There is a shorter-than-documented limitation on the # length of the string passed to WriteConsoleW (see # . 
retval = WriteConsoleW(self._hConsole, text, min(remaining, 10000), byref(n), None) if retval == 0 or n.value == 0: raise IOError("WriteConsoleW returned %r, n.value = %r" % (retval, n.value)) remaining -= n.value if not remaining: break text = text[n.value:] except Exception as e: _complain("%s.write: %r" % (self.name, e)) raise def writelines(self, lines): try: for line in lines: self.write(line) except Exception as e: _complain("%s.writelines: %r" % (self.name, e)) raise if real_stdout: sys.stdout = UnicodeOutput(hStdout, None, STDOUT_FILENO, '') else: sys.stdout = UnicodeOutput(None, sys.stdout, old_stdout_fileno, '') if real_stderr: sys.stderr = UnicodeOutput(hStderr, None, STDERR_FILENO, '') else: sys.stderr = UnicodeOutput(None, sys.stderr, old_stderr_fileno, '') except Exception as e: _complain("exception %r while fixing up sys.stdout and sys.stderr" % (e,)) # While we're at it, let's unmangle the command-line arguments: # This works around . GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32)) CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))(("CommandLineToArgvW", windll.shell32)) argc = c_int(0) argv_unicode = CommandLineToArgvW(GetCommandLineW(), byref(argc)) argv = [argv_unicode[i].encode('utf-8').decode('utf-8') for i in range(0, argc.value)] if not hasattr(sys, 'frozen'): # If this is an executable produced by py2exe or bbfreeze, then it will # have been invoked directly. Otherwise, unicode_argv[0] is the Python # interpreter, so skip that. argv = argv[1:] # Also skip option arguments to the Python interpreter. while len(argv) > 0: arg = argv[0] if not arg.startswith("-") or arg == "-": break argv = argv[1:] if arg == '-m': # sys.argv[0] should really be the absolute path of the module source, # but never mind break if arg == '-c': argv[0] = '-c' break # if you like: sys.argv = argv #### __init__.py #### addrindexrs.py import logging logger = logging.getLogger(__name__) import sys import os import json import requests from requests.exceptions import Timeout, ReadTimeout, ConnectionError import time import threading import socket import concurrent.futures import collections import binascii import hashlib import signal import bitcoin.wallet from counterpartylib.lib import config, util, address READ_BUF_SIZE = 65536 SOCKET_TIMEOUT = 5.0 BACKEND_PING_TIME = 30.0 raw_transactions_cache = util.DictCache(size=config.BACKEND_RAW_TRANSACTIONS_CACHE_SIZE) # used in getrawtransaction_batch() class BackendRPCError(Exception): pass class AddrIndexRsRPCError(Exception): pass def rpc_call(payload): """Calls to bitcoin core and returns the response""" url = config.BACKEND_URL response = None TRIES = 12 for i in range(TRIES): try: response = requests.post(url, data=json.dumps(payload), headers={'content-type': 'application/json'}, verify=(not config.BACKEND_SSL_NO_VERIFY), timeout=config.REQUESTS_TIMEOUT) if i > 0: logger.debug('Successfully connected.') break except (Timeout, ReadTimeout, ConnectionError): logger.debug('Could not connect to backend at `{}`. (Try {}/{})'.format(util.clean_url_for_log(url), i+1, TRIES)) time.sleep(5) if response == None: if config.TESTNET: network = 'testnet' elif config.REGTEST: network = 'regtest' else: network = 'mainnet' raise BackendRPCError('Cannot communicate with backend at `{}`. 
(server is set to run on {}, is backend?)'.format(util.clean_url_for_log(url), network)) elif response.status_code in (401,): raise BackendRPCError('Authorization error connecting to {}: {} {}'.format(util.clean_url_for_log(url), response.status_code, response.reason)) elif response.status_code not in (200, 500): raise BackendRPCError(str(response.status_code) + ' ' + response.reason) # Handle json decode errors try: response_json = response.json() except json.decoder.JSONDecodeError as e: raise BackendRPCError('Received invalid JSON from backend with a response of {}'.format(str(response.status_code) + ' ' + response.reason)) # Batch query returns a list if isinstance(response_json, list): return response_json if 'error' not in response_json.keys() or response_json['error'] == None: return response_json['result'] elif response_json['error']['code'] == -5: # RPC_INVALID_ADDRESS_OR_KEY raise BackendRPCError('{} Is `txindex` enabled in {} Core?'.format(response_json['error'], config.BTC_NAME)) elif response_json['error']['code'] in [-28, -8, -2]: # “Verifying blocks...” or “Block height out of range” or “The network does not appear to fully agree!“ logger.debug('Backend not ready. Sleeping for ten seconds.') # If Bitcoin Core takes more than `sys.getrecursionlimit() * 10 = 9970` # seconds to start, this’ll hit the maximum recursion depth limit. time.sleep(10) return rpc_call(payload) else: raise BackendRPCError('Error connecting to {}: {}'.format(util.clean_url_for_log(url), response_json['error'])) def rpc(method, params): payload = { "method": method, "params": params, "jsonrpc": "2.0", "id": 0, } return rpc_call(payload) def rpc_batch(request_list): responses = collections.deque() def make_call(chunk): #send a list of requests to bitcoind to be executed #note that this is list executed serially, in the same thread in bitcoind #e.g. 
see: https://github.com/bitcoin/bitcoin/blob/master/src/rpcserver.cpp#L939 responses.extend(rpc_call(chunk)) chunks = util.chunkify(request_list, config.RPC_BATCH_SIZE) with concurrent.futures.ThreadPoolExecutor(max_workers=config.BACKEND_RPC_BATCH_NUM_WORKERS) as executor: for chunk in chunks: executor.submit(make_call, chunk) return list(responses) def extract_addresses(txhash_list): logger.debug('extract_addresses, txs: %d' % (len(txhash_list), )) tx_hashes_tx = getrawtransaction_batch(txhash_list, verbose=True) return extract_addresses_from_txlist(tx_hashes_tx, getrawtransaction_batch) def extract_addresses_from_txlist(tx_hashes_tx, _getrawtransaction_batch): """ helper for extract_addresses, seperated so we can pass in a mocked _getrawtransaction_batch for test purposes """ logger.debug('extract_addresses_from_txlist, txs: %d' % (len(tx_hashes_tx.keys()), )) tx_hashes_addresses = {} tx_inputs_hashes = set() # use set to avoid duplicates for tx_hash, tx in tx_hashes_tx.items(): tx_hashes_addresses[tx_hash] = set() for vout in tx['vout']: if 'addresses' in vout['scriptPubKey']: tx_hashes_addresses[tx_hash].update(tuple(vout['scriptPubKey']['addresses'])) tx_inputs_hashes.update([vin['txid'] for vin in tx['vin']]) logger.debug('extract_addresses, input TXs: %d' % (len(tx_inputs_hashes), )) # chunk txs to avoid huge memory spikes for tx_inputs_hashes_chunk in util.chunkify(list(tx_inputs_hashes), config.BACKEND_RAW_TRANSACTIONS_CACHE_SIZE): raw_transactions = _getrawtransaction_batch(tx_inputs_hashes_chunk, verbose=True) for tx_hash, tx in tx_hashes_tx.items(): for vin in tx['vin']: vin_tx = raw_transactions.get(vin['txid'], None) if not vin_tx: continue vout = vin_tx['vout'][vin['vout']] if 'addresses' in vout['scriptPubKey']: tx_hashes_addresses[tx_hash].update(tuple(vout['scriptPubKey']['addresses'])) return tx_hashes_addresses, tx_hashes_tx def getblockcount(): return rpc('getblockcount', []) def getblockhash(blockcount): return rpc('getblockhash', [blockcount]) def getblock(block_hash): return rpc('getblock', [block_hash, False]) def getrawtransaction(tx_hash, verbose=False, skip_missing=False): return getrawtransaction_batch([tx_hash], verbose=verbose, skip_missing=skip_missing)[tx_hash] def getrawmempool(): return rpc('getrawmempool', []) def fee_per_kb(conf_target, mode, nblocks=None): """ :param conf_target: :param mode: :return: fee_per_kb in satoshis, or None when unable to determine """ if nblocks is None and conf_target is None: conf_target = nblocks feeperkb = rpc('estimatesmartfee', [conf_target, mode]) if 'errors' in feeperkb and feeperkb['errors'][0] == 'Insufficient data or no feerate found': return None return int(max(feeperkb['feerate'] * config.UNIT, config.DEFAULT_FEE_PER_KB_ESTIMATE_SMART)) def sendrawtransaction(tx_hex): return rpc('sendrawtransaction', [tx_hex]) GETRAWTRANSACTION_MAX_RETRIES=2 monotonic_call_id = 0 def getrawtransaction_batch(txhash_list, verbose=False, skip_missing=False, _retry=0): _logger = logger.getChild("getrawtransaction_batch") if len(txhash_list) > config.BACKEND_RAW_TRANSACTIONS_CACHE_SIZE: #don't try to load in more than BACKEND_RAW_TRANSACTIONS_CACHE_SIZE entries in a single call txhash_list_chunks = util.chunkify(txhash_list, config.BACKEND_RAW_TRANSACTIONS_CACHE_SIZE) txes = {} for txhash_list_chunk in txhash_list_chunks: txes.update(getrawtransaction_batch(txhash_list_chunk, verbose=verbose, skip_missing=skip_missing)) return txes tx_hash_call_id = {} payload = [] noncached_txhashes = set() txhash_list = set(txhash_list) # payload 
for transactions not in cache for tx_hash in txhash_list: if tx_hash not in raw_transactions_cache: #call_id = binascii.hexlify(os.urandom(5)).decode('utf8') # Don't drain urandom global monotonic_call_id monotonic_call_id = monotonic_call_id + 1 call_id = "{}".format(monotonic_call_id) payload.append({ "method": 'getrawtransaction', "params": [tx_hash, 1], "jsonrpc": "2.0", "id": call_id }) noncached_txhashes.add(tx_hash) tx_hash_call_id[call_id] = tx_hash #refresh any/all cache entries that already exist in the cache, # so they're not inadvertently removed by another thread before we can consult them #(this assumes that the size of the working set for any given workload doesn't exceed the max size of the cache) for tx_hash in txhash_list.difference(noncached_txhashes): raw_transactions_cache.refresh(tx_hash) _logger.debug("getrawtransaction_batch: txhash_list size: {} / raw_transactions_cache size: {} / # getrawtransaction calls: {}".format( len(txhash_list), len(raw_transactions_cache), len(payload))) # populate cache if len(payload) > 0: batch_responses = rpc_batch(payload) for response in batch_responses: if 'error' not in response or response['error'] is None: tx_hex = response['result'] tx_hash = tx_hash_call_id[response['id']] raw_transactions_cache[tx_hash] = tx_hex elif skip_missing and 'error' in response and response['error']['code'] == -5: raw_transactions_cache[tx_hash] = None logging.debug('Missing TX with no raw info skipped (txhash: {}): {}'.format( tx_hash_call_id.get(response.get('id', '??'), '??'), response['error'])) else: #TODO: this seems to happen for bogus transactions? Maybe handle it more gracefully than just erroring out? raise BackendRPCError('{} (txhash:: {})'.format(response['error'], tx_hash_call_id.get(response.get('id', '??'), '??'))) # get transactions from cache result = {} for tx_hash in txhash_list: try: if verbose: result[tx_hash] = raw_transactions_cache[tx_hash] else: result[tx_hash] = raw_transactions_cache[tx_hash]['hex'] if raw_transactions_cache[tx_hash] is not None else None except KeyError as e: #shows up most likely due to finickyness with addrindex not always returning results that we need... print("Key error in addrindexrs still exists!!!!!") _logger.warning("tx missing in rawtx cache: {} -- txhash_list size: {}, hash: {} / raw_transactions_cache size: {} / # rpc_batch calls: {} / txhash in noncached_txhashes: {} / txhash in txhash_list: {} -- list {}".format( e, len(txhash_list), hashlib.md5(json.dumps(list(txhash_list)).encode()).hexdigest(), len(raw_transactions_cache), len(payload), tx_hash in noncached_txhashes, tx_hash in txhash_list, list(txhash_list.difference(noncached_txhashes)) )) if _retry < GETRAWTRANSACTION_MAX_RETRIES: #try again time.sleep(0.05 * (_retry + 1)) # Wait a bit, hitting the index non-stop may cause it to just break down... 
TODO: Better handling r = getrawtransaction_batch([tx_hash], verbose=verbose, skip_missing=skip_missing, _retry=_retry+1) result[tx_hash] = r[tx_hash] else: raise #already tried again, give up return result class AddrIndexRsThread (threading.Thread): def __init__(self, host, port): threading.Thread.__init__(self) self.host = host self.port = port self.sock = None self.lastId = 0 self.message_to_send = None self.message_result = None self.is_killed = False def stop(self): logging.debug('AddrIndexRs thread closing') self.send({"kill": True}) def connect(self): self.lastId = 0 while True: logging.info('AddrIndexRs connecting...') self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.sock.settimeout(SOCKET_TIMEOUT) try: self.sock.connect((self.host, self.port)) except: logging.info('Error connecting to AddrIndexRs! Retrying in a few seconds') time.sleep(5.0) else: logging.info('Connected to AddrIndexRs!') break def run(self): self.locker = threading.Condition() self.locker.acquire() self.connect() while self.locker.wait(): if not(self.is_killed) and self.message_to_send != None: msg = self.message_to_send self.message_to_send = None retry_count = 15 while retry_count > 0: has_sent = False while not(has_sent) and msg: try: logging.debug('AddrIndexRs sending') self.sock.send(msg) has_sent = True except Exception as e: #try: logging.debug('AddrIndexRs error:' + e) self.connect() #except Exception as e2: #logging.debug('AddrIndexRs fatal error:' + e2) self.message_to_send = None data = b"" parsed = False while not(parsed): try: data = data + self.sock.recv(READ_BUF_SIZE) self.message_result = json.loads(data.decode('utf-8')) retry_count = 0 parsed = True logging.debug('AddrIndexRs Recv complete!') except socket.timeout: logging.debug('AddrIndexRs Recv timeout error sending: '+str(msg)) if retry_count <= 0: self.connect() self.message_result = None retry_count -= -1 except socket.error as e: logging.debug('AddrIndexRs Recv error:' + str(e)+' with msg '+str(msg)) self.connect() except Exception as e: logging.debug('AddrIndexRs Recv error:' + str(e)+' with msg '+str(msg)) if retry_count <= 0: raise e self.message_result = None retry_count -= 1 finally: self.locker.notify() else: self.locker.notify() self.sock.close() logging.debug('AddrIndexRs socket closed normally') def send(self, msg): self.locker.acquire() if not("kill" in msg): msg["id"] = self.lastId self.lastId += 1 self.message_to_send = (json.dumps(msg) + "\n").encode('utf8') self.locker.notify() self.locker.wait() self.locker.release() return self.message_result _backend = None def ensure_addrindexrs_connected(): global _backend backoff = 500 max_backoff = 5000 while _backend == None: try: _backend = AddrIndexRsThread(config.INDEXD_CONNECT, config.INDEXD_PORT) _backend.daemon = True _backend.start() _backend.send({ "method": "server.version", "params": [] }) except Exception as e: logger.debug(e) time.sleep(backoff) backoff = min(backoff * 1.5, max_backoff) def _script_pubkey_to_hash(spk): return hashlib.sha256(spk).digest()[::-1].hex() def _address_to_hash(addr): script_pubkey = bitcoin.wallet.CBitcoinAddress(addr).to_scriptPubKey() return _script_pubkey_to_hash(script_pubkey) # Returns an array of UTXOS from an address in the following format # { # "txId": utxo_txid_hex, # "vout": num, # "height": num, # "value": sats, # "confirmations": num # } # [{"txId":"a0d12eb3716e2e70fd00525486ace0da2947f82d818b7be0285f16ff672cf237","vout":5,"height":647484,"value":30455293,"confirmations":2}] # def unpack_outpoint(outpoint): txid, vout 
= outpoint.split(':') return (txid, int(vout)) def unpack_vout(outpoint, tx, block_count): if tx is None: return None vout = tx["vout"][outpoint[1]] height = -1 if "confirmations" in tx and tx["confirmations"] > 0: height = block_count - tx["confirmations"] + 1 else: tx["confirmations"] = 0 return { "txId": tx["txid"], "vout": outpoint[1], "height": height, "value": int(round(vout["value"] * config.UNIT)), "confirmations": tx["confirmations"] } def get_unspent_txouts(source): ensure_addrindexrs_connected() block_count = getblockcount() result = _backend.send({ "method": "blockchain.scripthash.get_utxos", "params": [_address_to_hash(source)] }) if not(result is None) and "result" in result: result = result["result"] result = [unpack_outpoint(x) for x in result] # each item on the result array is like # {"tx_hash": hex_encoded_hash} batch = getrawtransaction_batch([x[0] for x in result], verbose=True, skip_missing=True) batch = [unpack_vout(outpoint, batch[outpoint[0]], block_count) for outpoint in result if outpoint[0] in batch] batch = [x for x in batch if x is not None] return batch else: return [] # Returns transactions in the following format # { # "blockhash": hexstring, # "blocktime": num, # "confirmations": num, # "hash": hexstring, # "height": num, # "hex": hexstring, # "locktime": num, # "size": num, # "time": num, # "txid": hexstring, # "version": num, # "vsize": num, # "weight": num, # "vin": [ # { # "txinwitness": array of hex_witness_program, // Only if it's a witness-containing tx # "vout": num, # "txid": hexstring, # "sequence": num, # "coinbase": X, // contents not important, this is only present if the tx is a coinbase # "scriptSig": { # "asm": asm_decompiled_program, # "hex": hex_program # } # },... # ], # "vout": [ # { # "n": num, # "value": decimal, # "scriptPubKey": { # "type": string, # "reqSigs": num, # "hex": hexstring, // the program in hex # "asm": string, // the decompiled program # "addresses": [ ...list of found addresses on the program ] # } # } # ] # # } def search_raw_transactions(address, unconfirmed=True): ensure_addrindexrs_connected() hsh = _address_to_hash(address) txs = _backend.send({ "method": "blockchain.scripthash.get_history", "params": [hsh] })["result"] batch = getrawtransaction_batch([x["tx_hash"] for x in txs], verbose=True) if not(unconfirmed): batch = [x for x in batch if x.height >= 0] return batch # Returns the number of blocks the backend is behind the node def getindexblocksbehind(): # Addrindexrs never "gets behind" return 0 def init(): ensure_addrindexrs_connected() def stop(): if '_backend' in globals(): _backend.stop() #### indexd.py import logging logger = logging.getLogger(__name__) import sys import os import json import requests from requests.exceptions import Timeout, ReadTimeout, ConnectionError import time import threading import concurrent.futures import collections import binascii import hashlib from counterpartylib.lib import config, util raw_transactions_cache = util.DictCache(size=config.BACKEND_RAW_TRANSACTIONS_CACHE_SIZE) # used in getrawtransaction_batch() class BackendRPCError(Exception): pass class IndexdRPCError(Exception): pass def rpc_call(payload): """Calls to bitcoin core and returns the response""" url = config.BACKEND_URL response = None TRIES = 12 for i in range(TRIES): try: response = requests.post(url, data=json.dumps(payload), headers={'content-type': 'application/json'}, verify=(not config.BACKEND_SSL_NO_VERIFY), timeout=config.REQUESTS_TIMEOUT) if i > 0: logger.debug('Successfully connected.') break 
except (Timeout, ReadTimeout, ConnectionError): logger.debug('Could not connect to backend at `{}`. (Try {}/{})'.format(util.clean_url_for_log(url), i+1, TRIES)) time.sleep(5) if response == None: if config.TESTNET: network = 'testnet' elif config.REGTEST: network = 'regtest' else: network = 'mainnet' raise BackendRPCError('Cannot communicate with backend at `{}`. (server is set to run on {}, is backend?)'.format(util.clean_url_for_log(url), network)) elif response.status_code in (401,): raise BackendRPCError('Authorization error connecting to {}: {} {}'.format(util.clean_url_for_log(url), response.status_code, response.reason)) elif response.status_code not in (200, 500): raise BackendRPCError(str(response.status_code) + ' ' + response.reason) # Handle json decode errors try: response_json = response.json() except json.decoder.JSONDecodeError as e: raise BackendRPCError('Received invalid JSON from backend with a response of {}'.format(str(response.status_code) + ' ' + response.reason)) # Batch query returns a list if isinstance(response_json, list): return response_json if 'error' not in response_json.keys() or response_json['error'] == None: return response_json['result'] elif response_json['error']['code'] == -5: # RPC_INVALID_ADDRESS_OR_KEY raise BackendRPCError('{} Is `txindex` enabled in {} Core?'.format(response_json['error'], config.BTC_NAME)) elif response_json['error']['code'] in [-28, -8, -2]: # “Verifying blocks...” or “Block height out of range” or “The network does not appear to fully agree!“ logger.debug('Backend not ready. Sleeping for ten seconds.') # If Bitcoin Core takes more than `sys.getrecursionlimit() * 10 = 9970` # seconds to start, this’ll hit the maximum recursion depth limit. time.sleep(10) return rpc_call(payload) else: raise BackendRPCError('Error connecting to {}: {}'.format(util.clean_url_for_log(url), response_json['error'])) def rpc(method, params): payload = { "method": method, "params": params, "jsonrpc": "2.0", "id": 0, } return rpc_call(payload) def rpc_batch(request_list): responses = collections.deque() def make_call(chunk): #send a list of requests to bitcoind to be executed #note that this is list executed serially, in the same thread in bitcoind #e.g. 
see: https://github.com/bitcoin/bitcoin/blob/master/src/rpcserver.cpp#L939 responses.extend(rpc_call(chunk)) chunks = util.chunkify(request_list, config.RPC_BATCH_SIZE) with concurrent.futures.ThreadPoolExecutor(max_workers=config.BACKEND_RPC_BATCH_NUM_WORKERS) as executor: for chunk in chunks: executor.submit(make_call, chunk) return list(responses) def extract_addresses(txhash_list): logger.debug('extract_addresses, txs: %d' % (len(txhash_list), )) tx_hashes_tx = getrawtransaction_batch(txhash_list, verbose=True) return extract_addresses_from_txlist(tx_hashes_tx, getrawtransaction_batch) def extract_addresses_from_txlist(tx_hashes_tx, _getrawtransaction_batch): """ helper for extract_addresses, seperated so we can pass in a mocked _getrawtransaction_batch for test purposes """ logger.debug('extract_addresses_from_txlist, txs: %d' % (len(tx_hashes_tx.keys()), )) tx_hashes_addresses = {} tx_inputs_hashes = set() # use set to avoid duplicates for tx_hash, tx in tx_hashes_tx.items(): tx_hashes_addresses[tx_hash] = set() for vout in tx['vout']: if 'addresses' in vout['scriptPubKey']: tx_hashes_addresses[tx_hash].update(tuple(vout['scriptPubKey']['addresses'])) tx_inputs_hashes.update([vin['txid'] for vin in tx['vin']]) logger.debug('extract_addresses, input TXs: %d' % (len(tx_inputs_hashes), )) # chunk txs to avoid huge memory spikes for tx_inputs_hashes_chunk in util.chunkify(list(tx_inputs_hashes), config.BACKEND_RAW_TRANSACTIONS_CACHE_SIZE): raw_transactions = _getrawtransaction_batch(tx_inputs_hashes_chunk, verbose=True) for tx_hash, tx in tx_hashes_tx.items(): for vin in tx['vin']: vin_tx = raw_transactions.get(vin['txid'], None) if not vin_tx: continue vout = vin_tx['vout'][vin['vout']] if 'addresses' in vout['scriptPubKey']: tx_hashes_addresses[tx_hash].update(tuple(vout['scriptPubKey']['addresses'])) return tx_hashes_addresses, tx_hashes_tx def getblockcount(): return rpc('getblockcount', []) def getblockhash(blockcount): return rpc('getblockhash', [blockcount]) def getblock(block_hash): return rpc('getblock', [block_hash, False]) def getrawtransaction(tx_hash, verbose=False, skip_missing=False): return getrawtransaction_batch([tx_hash], verbose=verbose, skip_missing=skip_missing)[tx_hash] def getrawmempool(): return rpc('getrawmempool', []) def fee_per_kb(conf_target, mode, nblocks=None): """ :param conf_target: :param mode: :return: fee_per_kb in satoshis, or None when unable to determine """ if nblocks is None and conf_target is None: conf_target = nblocks feeperkb = rpc('estimatesmartfee', [conf_target, mode]) if 'errors' in feeperkb and feeperkb['errors'][0] == 'Insufficient data or no feerate found': return None return int(max(feeperkb['feerate'] * config.UNIT, config.DEFAULT_FEE_PER_KB_ESTIMATE_SMART)) def sendrawtransaction(tx_hex): return rpc('sendrawtransaction', [tx_hex]) GETRAWTRANSACTION_MAX_RETRIES=2 def getrawtransaction_batch(txhash_list, verbose=False, skip_missing=False, _retry=0): _logger = logger.getChild("getrawtransaction_batch") if len(txhash_list) > config.BACKEND_RAW_TRANSACTIONS_CACHE_SIZE: #don't try to load in more than BACKEND_RAW_TRANSACTIONS_CACHE_SIZE entries in a single call txhash_list_chunks = util.chunkify(txhash_list, config.BACKEND_RAW_TRANSACTIONS_CACHE_SIZE) txes = {} for txhash_list_chunk in txhash_list_chunks: txes.update(getrawtransaction_batch(txhash_list_chunk, verbose=verbose, skip_missing=skip_missing)) return txes tx_hash_call_id = {} payload = [] noncached_txhashes = set() txhash_list = set(txhash_list) # payload for transactions not in 
cache for tx_hash in txhash_list: if tx_hash not in raw_transactions_cache: call_id = binascii.hexlify(os.urandom(5)).decode('utf8') payload.append({ "method": 'getrawtransaction', "params": [tx_hash, 1], "jsonrpc": "2.0", "id": call_id }) noncached_txhashes.add(tx_hash) tx_hash_call_id[call_id] = tx_hash #refresh any/all cache entries that already exist in the cache, # so they're not inadvertently removed by another thread before we can consult them #(this assumes that the size of the working set for any given workload doesn't exceed the max size of the cache) for tx_hash in txhash_list.difference(noncached_txhashes): raw_transactions_cache.refresh(tx_hash) _logger.debug("getrawtransaction_batch: txhash_list size: {} / raw_transactions_cache size: {} / # getrawtransaction calls: {}".format( len(txhash_list), len(raw_transactions_cache), len(payload))) # populate cache if len(payload) > 0: batch_responses = rpc_batch(payload) for response in batch_responses: if 'error' not in response or response['error'] is None: tx_hex = response['result'] tx_hash = tx_hash_call_id[response['id']] raw_transactions_cache[tx_hash] = tx_hex elif skip_missing and 'error' in response and response['error']['code'] == -5: raw_transactions_cache[tx_hash] = None logging.debug('Missing TX with no raw info skipped (txhash: {}): {}'.format( tx_hash_call_id.get(response.get('id', '??'), '??'), response['error'])) else: #TODO: this seems to happen for bogus transactions? Maybe handle it more gracefully than just erroring out? raise BackendRPCError('{} (txhash:: {})'.format(response['error'], tx_hash_call_id.get(response.get('id', '??'), '??'))) # get transactions from cache result = {} for tx_hash in txhash_list: try: if verbose: result[tx_hash] = raw_transactions_cache[tx_hash] else: result[tx_hash] = raw_transactions_cache[tx_hash]['hex'] if raw_transactions_cache[tx_hash] is not None else None except KeyError as e: #shows up most likely due to finickyness with addrindex not always returning results that we need... _logger.warning("tx missing in rawtx cache: {} -- txhash_list size: {}, hash: {} / raw_transactions_cache size: {} / # rpc_batch calls: {} / txhash in noncached_txhashes: {} / txhash in txhash_list: {} -- list {}".format( e, len(txhash_list), hashlib.md5(json.dumps(list(txhash_list)).encode()).hexdigest(), len(raw_transactions_cache), len(payload), tx_hash in noncached_txhashes, tx_hash in txhash_list, list(txhash_list.difference(noncached_txhashes)) )) if _retry < GETRAWTRANSACTION_MAX_RETRIES: #try again time.sleep(0.05 * (_retry + 1)) # Wait a bit, hitting the index non-stop may cause it to just break down... 
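# --- Illustrative sketch of the cache semantics getrawtransaction_batch relies on above.
# util.DictCache is assumed to behave roughly like this size-bounded LRU mapping: writes
# evict the oldest entry once `size` is exceeded, and refresh() marks an existing entry
# as recently used so another batch does not push it out before it is consulted.
# The real DictCache implementation may differ in detail.
import collections

class _LRUDictSketch:
    def __init__(self, size):
        self.size = size
        self._data = collections.OrderedDict()

    def __contains__(self, key):
        return key in self._data

    def __len__(self):
        return len(self._data)

    def __getitem__(self, key):
        return self._data[key]

    def __setitem__(self, key, value):
        self._data[key] = value
        self._data.move_to_end(key)          # newest entries live at the end
        while len(self._data) > self.size:
            self._data.popitem(last=False)   # evict the least recently used entry

    def refresh(self, key):
        self._data.move_to_end(key)          # keep a hot entry from being evicted

_cache = _LRUDictSketch(size=2)
_cache['tx_a'] = {'hex': '00'}
_cache['tx_b'] = {'hex': '01'}
_cache.refresh('tx_a')                       # 'tx_a' is now the most recently used
_cache['tx_c'] = {'hex': '02'}               # evicts 'tx_b', not the refreshed 'tx_a'
assert 'tx_a' in _cache and 'tx_b' not in _cache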
TODO: Better handling r = getrawtransaction_batch([tx_hash], verbose=verbose, skip_missing=skip_missing, _retry=_retry+1) result[tx_hash] = r[tx_hash] else: raise #already tried again, give up return result def get_unspent_txouts(source): return indexd_rpc_call('/a/'+source+'/utxos') def search_raw_transactions(address, unconfirmed=True): all_transactions = indexd_rpc_call('/a/'+address+'/txs?verbose=1') if unconfirmed: return all_transactions # filter for confirmed transactions only confirmed_transactions = list(filter(lambda t: 'confirmations' in t and t['confirmations'] > 0, all_transactions)) return confirmed_transactions def getindexblocksbehind(): status = indexd_rpc_call('/status') if status['ready']: return 0 if status['blocksBehind']: return status['blocksBehind'] raise IndexdRPCError('Unknown status for indexd') def indexd_rpc_call(path): url = config.INDEXD_URL+path response = None try: response = requests.get(url, headers={'content-type': 'application/json'}, timeout=config.REQUESTS_TIMEOUT) except (Timeout, ReadTimeout, ConnectionError): logger.debug('Could not connect to backend at `{}`.'.format(util.clean_url_for_log(url),)) if response == None: if config.TESTNET: network = 'testnet' elif config.REGTEST: network = 'regtest' else: network = 'mainnet' raise IndexdRPCError('Cannot communicate with {} indexd server at `{}`.'.format(network, util.clean_url_for_log(url))) elif response.status_code == 400: raise IndexdRPCError('Indexd returned error: {} {} {}'.format(response.status_code, response.reason, response.text)) elif response.status_code not in (200, 500): raise IndexdRPCError("Bad response from {}: {} {}".format(util.clean_url_for_log(url), response.status_code, response.reason)) # Return result, with error handling. response_json = response.json() if isinstance(response_json, (list, tuple)) or 'error' not in response_json.keys() or response_json['error'] == None: return response_json else: raise IndexdRPCError('{}'.format(response_json['error'])) # vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4 #### __init__.py import getpass import binascii import logging logger = logging.getLogger(__name__) import sys import json import time from decimal import Decimal as D import bitcoin as bitcoinlib import bitcoin.rpc as bitcoinlib_rpc from bitcoin.core import CBlock from counterpartylib.lib import util from counterpartylib.lib import script from counterpartylib.lib import config from counterpartylib.lib import exceptions from counterpartylib.lib.backend import addrindexrs MEMPOOL_CACHE_INITIALIZED = False PRETX_CACHE = {} def sortkeypicker(keynames): """http://stackoverflow.com/a/1143719""" negate = set() for i, k in enumerate(keynames): if k[:1] == '-': keynames[i] = k[1:] negate.add(k[1:]) def getit(adict): composite = [adict[k] for k in keynames] for i, (k, v) in enumerate(zip(keynames, composite)): if k in negate: composite[i] = -v return composite return getit def BACKEND(): mdl = sys.modules['counterpartylib.lib.backend.{}'.format(config.BACKEND_NAME)] mdl.init() return mdl def stop(): BACKEND().stop() def getblockcount(): return BACKEND().getblockcount() def getblockhash(blockcount): return BACKEND().getblockhash(blockcount) def getblock(block_hash): block_hex = BACKEND().getblock(block_hash) return CBlock.deserialize(util.unhexlify(block_hex)) def cache_pretx(txid, rawtx): PRETX_CACHE[binascii.hexlify(txid).decode('utf8')] = binascii.hexlify(rawtx).decode('utf8') def clear_pretx(txid): del PRETX_CACHE[binascii.hexlify(txid).decode('utf8')] def 
getrawtransaction(tx_hash, verbose=False, skip_missing=False): if tx_hash in PRETX_CACHE: return PRETX_CACHE[tx_hash] else: return BACKEND().getrawtransaction(tx_hash, verbose=verbose, skip_missing=skip_missing) def getrawtransaction_batch(txhash_list, verbose=False, skip_missing=False): return BACKEND().getrawtransaction_batch(txhash_list, verbose=verbose, skip_missing=skip_missing) def sendrawtransaction(tx_hex): return BACKEND().sendrawtransaction(tx_hex) def getrawmempool(): return BACKEND().getrawmempool() def getindexblocksbehind(): return BACKEND().getindexblocksbehind() def extract_addresses(txhash_list): return BACKEND().extract_addresses(txhash_list) def ensure_script_pub_key_for_inputs(coins): txhash_set = set() for coin in coins: if 'scriptPubKey' not in coin: txhash_set.add(coin['txid']) if len(txhash_set) > 0: txs = BACKEND().getrawtransaction_batch(list(txhash_set), verbose=True, skip_missing=False) for coin in coins: if 'scriptPubKey' not in coin: # get the scriptPubKey txid = coin['txid'] for vout in txs[txid]['vout']: if vout['n'] == coin['vout']: coin['scriptPubKey'] = vout['scriptPubKey']['hex'] return coins def fee_per_kb(conf_target, mode, nblocks=None): """ :param conf_target: :param mode: :return: fee_per_kb in satoshis, or None when unable to determine """ return BACKEND().fee_per_kb(conf_target, mode, nblocks=nblocks) def deserialize(tx_hex): return bitcoinlib.core.CTransaction.deserialize(binascii.unhexlify(tx_hex)) def serialize(ctx): return bitcoinlib.core.CTransaction.serialize(ctx) def is_valid(address): try: script.validate(address) return True except script.AddressError: return False def get_txhash_list(block): return [bitcoinlib.core.b2lx(ctx.GetHash()) for ctx in block.vtx] def get_tx_list(block): raw_transactions = {} tx_hash_list = [] for ctx in block.vtx: if util.enabled('correct_segwit_txids'): hsh = ctx.GetTxid() else: hsh = ctx.GetHash() tx_hash = bitcoinlib.core.b2lx(hsh) raw = ctx.serialize() tx_hash_list.append(tx_hash) raw_transactions[tx_hash] = bitcoinlib.core.b2x(raw) return (tx_hash_list, raw_transactions) def sort_unspent_txouts(unspent, unconfirmed=False): # Filter out all dust amounts to avoid bloating the resultant transaction unspent = list(filter(lambda x: x['value'] > config.DEFAULT_MULTISIG_DUST_SIZE, unspent)) # Sort by amount, using the largest UTXOs available if config.REGTEST: # REGTEST has a lot of coinbase inputs that can't be spent due to maturity # this doesn't usually happens on mainnet or testnet because most fednodes aren't mining unspent = sorted(unspent, key=lambda x: (x['confirmations'], x['value']), reverse=True) else: unspent = sorted(unspent, key=lambda x: x['value'], reverse=True) return unspent def get_btc_supply(normalize=False): """returns the total supply of {} (based on what Bitcoin Core says the current block height is)""".format(config.BTC) block_count = getblockcount() blocks_remaining = block_count total_supply = 0 reward = 50.0 while blocks_remaining > 0: if blocks_remaining >= 210000: blocks_remaining -= 210000 total_supply += 210000 * reward reward /= 2 else: total_supply += (blocks_remaining * reward) blocks_remaining = 0 return total_supply if normalize else int(total_supply * config.UNIT) class MempoolError(Exception): pass def get_unspent_txouts(source, unconfirmed=False, unspent_tx_hash=None): """returns a list of unspent outputs for a specific address @return: A list of dicts, with each entry in the dict having the following keys: """ unspent = BACKEND().get_unspent_txouts(source) # filter by 
unspent_tx_hash if unspent_tx_hash is not None: unspent = list(filter(lambda x: x['txId'] == unspent_tx_hash, unspent)) # filter unconfirmed if not unconfirmed: unspent = [utxo for utxo in unspent if utxo['confirmations'] > 0] # format for utxo in unspent: utxo['amount'] = float(utxo['value'] / config.UNIT) utxo['txid'] = utxo['txId'] del utxo['txId'] # do not add scriptPubKey return unspent def search_raw_transactions(address, unconfirmed=True): return BACKEND().search_raw_transactions(address, unconfirmed) class UnknownPubKeyError(Exception): pass def pubkeyhash_to_pubkey(pubkeyhash, provided_pubkeys=None): # Search provided pubkeys. if provided_pubkeys: if type(provided_pubkeys) != list: provided_pubkeys = [provided_pubkeys] for pubkey in provided_pubkeys: if pubkeyhash == script.pubkey_to_pubkeyhash(util.unhexlify(pubkey)): return pubkey elif pubkeyhash == script.pubkey_to_p2whash(util.unhexlify(pubkey)): return pubkey # Search blockchain. raw_transactions = search_raw_transactions(pubkeyhash, unconfirmed=True) for tx_id in raw_transactions: tx = raw_transactions[tx_id] for vin in tx['vin']: if 'txinwitness' in vin: if len(vin['txinwitness']) >= 2: # catch unhexlify errs for when txinwitness[1] isn't a witness program (eg; for P2W) try: pubkey = vin['txinwitness'][1] if pubkeyhash == script.pubkey_to_p2whash(util.unhexlify(pubkey)): return pubkey except binascii.Error: pass elif 'coinbase' not in vin: scriptsig = vin['scriptSig'] asm = scriptsig['asm'].split(' ') if len(asm) >= 2: # catch unhexlify errs for when asm[1] isn't a pubkey (eg; for P2SH) try: pubkey = asm[1] if pubkeyhash == script.pubkey_to_pubkeyhash(util.unhexlify(pubkey)): return pubkey except binascii.Error: pass raise UnknownPubKeyError('Public key was neither provided nor published in blockchain.') def multisig_pubkeyhashes_to_pubkeys(address, provided_pubkeys=None): signatures_required, pubkeyhashes, signatures_possible = script.extract_array(address) pubkeys = [pubkeyhash_to_pubkey(pubkeyhash, provided_pubkeys) for pubkeyhash in pubkeyhashes] return script.construct_array(signatures_required, pubkeys, signatures_possible) def init_mempool_cache(): """prime the mempool cache, so that functioning is faster... """ global MEMPOOL_CACHE_INITIALIZED logger.debug('Initializing mempool cache...') start = time.time() mempool_txhash_list = getrawmempool() #with this function, don't try to load in more than BACKEND_RAW_TRANSACTIONS_CACHE_SIZE entries num_tx = min(len(mempool_txhash_list), config.BACKEND_RAW_TRANSACTIONS_CACHE_SIZE) mempool_tx = BACKEND().getrawtransaction_batch(mempool_txhash_list[:num_tx], skip_missing=True, verbose=True) vin_txhash_list = [] max_remaining_num_tx = config.BACKEND_RAW_TRANSACTIONS_CACHE_SIZE - num_tx if max_remaining_num_tx: for txid in mempool_tx: tx = mempool_tx[txid] if not(tx is None): vin_txhash_list += [vin['txid'] for vin in tx['vin']] BACKEND().getrawtransaction_batch(vin_txhash_list[:max_remaining_num_tx], skip_missing=True, verbose=True) MEMPOOL_CACHE_INITIALIZED = True logger.info('Mempool cache initialized: {:.2f}s for {:,} transactions'.format(time.time() - start, num_tx + min(max_remaining_num_tx, len(vin_txhash_list)))) # vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4 #### bc_data_stream.py # # Workalike python implementation of Bitcoin's CDataStream class. 
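# --- Illustrative sketch of the pubkey -> pubkeyhash relationship that
# pubkeyhash_to_pubkey (above) relies on when scanning scriptSig / txinwitness data.
# script.pubkey_to_pubkeyhash is assumed to apply the standard Bitcoin HASH160
# (SHA-256 then RIPEMD-160) and then address-encode the result with the network
# version byte; only the HASH160 step is shown here.
import hashlib

def _hash160(pubkey_bytes):
    sha = hashlib.sha256(pubkey_bytes).digest()
    # Note: 'ripemd160' may be unavailable if the local OpenSSL build omits it.
    ripemd = hashlib.new('ripemd160')
    ripemd.update(sha)
    return ripemd.digest()

# A compressed public key is 33 bytes (0x02/0x03 prefix plus a 32-byte X coordinate);
# its HASH160 is always 20 bytes, which is what a P2PKH output commits to.
_example_pubkey = bytes([0x02]) + bytes(32)
assert len(_hash160(_example_pubkey)) == 20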
# import struct import mmap from .exceptions import SerializationError class BCDataStream(object): def __init__(self): self.input = None self.read_cursor = 0 def clear(self): self.input = None self.read_cursor = 0 def write(self, bytes): # Initialize with string of bytes if self.input is None: self.input = bytes else: self.input += bytes def map_file(self, file, start): # Initialize with bytes from file self.input = mmap.mmap(file.fileno(), 0, access=mmap.ACCESS_READ) self.read_cursor = start def seek_file(self, position): self.read_cursor = position def close_file(self): self.input.close() def read_string(self): # Strings are encoded depending on length: # 0 to 252 : 1-byte-length followed by bytes (if any) # 253 to 65,535 : byte '253' 2-byte-length followed by bytes # 65,536 to 4,294,967,295 : byte '254' 4-byte-length followed by bytes # ... and the Bitcoin client is coded to understand: # greater than 4,294,967,295 : byte '255' 8-byte-length followed by bytes of string # ... but I don't think it actually handles any strings that big. if self.input is None: raise SerializationError("call write(bytes) before trying to deserialize") try: length = self.read_compact_size() except IndexError: raise SerializationError("attempt to read past end of buffer") return self.read_bytes(length) def write_string(self, string): # Length-encoded as with read-string self.write_compact_size(len(string)) self.write(string) def read_bytes(self, length): try: result = self.input[self.read_cursor:self.read_cursor+length] self.read_cursor += length return result except IndexError: raise SerializationError("attempt to read past end of buffer") return '' def read_boolean(self): return self.read_bytes(1)[0] != chr(0) def read_int16(self): return self._read_num('<h') #### p2sh_encoding.py def maximum_data_chunk_size(pubkeylength): if pubkeylength >= 0: return bitcoinlib.core.script.MAX_SCRIPT_ELEMENT_SIZE - len(config.PREFIX) - pubkeylength - 12 #Two bytes are for unique offset.
This will work for a little more than 1000 outputs else: return bitcoinlib.core.script.MAX_SCRIPT_ELEMENT_SIZE - len(config.PREFIX) - 44 # Redeemscript size for p2pkh addresses, multisig won't work here def calculate_outputs(destination_outputs, data_array, fee_per_kb, exact_fee=None): datatx_size = 10 # 10 base datatx_size += 181 # 181 for source input datatx_size += (25 + 9) * len(destination_outputs) # destination outputs datatx_size += 13 # opreturn that signals P2SH encoding datatx_size += len(data_array) * (9 + 181) # size of p2sh inputs, excl data datatx_size += sum([len(data_chunk) for data_chunk in data_array]) # data in scriptSig datatx_necessary_fee = int(datatx_size / 1000 * fee_per_kb) pretx_output_size = 10 # 10 base pretx_output_size += len(data_array) * 29 # size of P2SH output size_for_fee = pretx_output_size # split the tx fee evenly between all datatx outputs # data_value = math.ceil(datatx_necessary_fee / len(data_array)) data_value = config.DEFAULT_REGULAR_DUST_SIZE # adjust the data output with the new value and recalculate data_btc_out data_btc_out = data_value * len(data_array) if exact_fee: remain_fee = exact_fee - data_value * len(data_array) if remain_fee > 0: #if the dust isn't enough to reach the exact_fee, data value will be an array with only the last fee bumped data_value = [data_value for i in range(len(data_array))] data_value[len(data_array)-1] = data_value[len(data_array)-1] + remain_fee data_btc_out = exact_fee data_output = (data_array, data_value) logger.getChild('p2shdebug').debug('datatx size: %d fee: %d' % (datatx_size, datatx_necessary_fee)) logger.getChild('p2shdebug').debug('pretx output size: %d' % (pretx_output_size, )) logger.getChild('p2shdebug').debug('size_for_fee: %d' % (size_for_fee, )) return size_for_fee, datatx_necessary_fee, data_value, data_btc_out def decode_p2sh_input(asm, p2sh_is_segwit=False): ''' Looks at the scriptSig for the input of the p2sh-encoded data transaction [signature] [data] [OP_HASH160 ... 
OP_EQUAL] ''' pubkey, source, redeem_script_is_valid, found_data = decode_data_redeem_script(asm[-1], p2sh_is_segwit) if redeem_script_is_valid: # this is a signed transaction, so we got {sig[,sig]} {datachunk} {redeemScript} datachunk = found_data redeemScript = asm[-1] #asm[-2:] else: #print('ASM:', len(asm)) pubkey, source, redeem_script_is_valid, found_data = decode_data_redeem_script(asm[-1], p2sh_is_segwit) if not redeem_script_is_valid or len(asm) != 3: return None, None, None # this is an unsigned transaction (last is outputScript), so we got [datachunk] [redeemScript] [temporaryOutputScript] datachunk, redeemScript, _substituteScript = asm data = datachunk if data[:len(config.PREFIX)] == config.PREFIX: data = data[len(config.PREFIX):] else: if data == b'': return source, None, None raise exceptions.DecodeError('unrecognised P2SH output') return source, None, data def decode_data_push(arr, pos): pushlen = 0 data = b'' opcode = bitcoinlib.core.script.CScriptOp(arr[pos]) if opcode > 0 and opcode < bitcoinlib.core.script.OP_PUSHDATA1: pushlen = arr[pos] pos += 1 elif opcode == bitcoinlib.core.script.OP_PUSHDATA1: pushlen = arr[pos + 1] pos += 2 elif opcode == bitcoinlib.core.script.OP_PUSHDATA2: (pushlen, ) = struct.unpack(' 41 and \ redeemScript[0] == bitcoinlib.core.script.OP_DROP and \ redeemScript[script_len-4] == bitcoinlib.core.script.OP_DROP and \ redeemScript[script_len-3] == bitcoinlib.core.script.OP_DEPTH and \ redeemScript[script_len-2] == bitcoinlib.core.script.OP_0 and \ redeemScript[script_len-1] == bitcoinlib.core.script.OP_EQUAL: # - OP_DROP {arbitrary multisig script} [n] OP_DROP OP_DEPTH 0 OP_EQUAL pubkey = None source = None redeem_script_is_valid = True else: pubkey = None source = None redeem_script_is_valid = False try: opcode = bitcoinlib.core.script.CScriptOp(redeemScript[0]) if opcode > bitcoinlib.core.script.OP_0 and opcode < bitcoinlib.core.script.OP_PUSHDATA1 or \ opcode in (bitcoinlib.core.script.OP_PUSHDATA1, bitcoinlib.core.script.OP_PUSHDATA2, bitcoinlib.core.script.OP_PUSHDATA4): pos = 0 pos, found_data = decode_data_push(redeemScript, 0) if redeemScript[pos] == bitcoinlib.core.script.OP_DROP: pos += 1 valid_sig = False opcode = redeemScript[pos] if type(opcode) != type(''): if opcode >= bitcoinlib.core.script.OP_2 and opcode <= bitcoinlib.core.script.OP_15: # it's multisig req_sigs = opcode - bitcoinlib.core.script.OP_1 + 1 pos += 1 pubkey = None num_sigs = 0 found_sigs = False while not found_sigs: pos, npubkey = decode_data_push(redeemScript, pos) num_sigs += 1 if redeemScript[pos] - bitcoinlib.core.script.OP_1 + 1 == num_sigs: found_sigs = True pos += 1 valid_sig = redeemScript[pos] == bitcoinlib.core.script.OP_CHECKMULTISIGVERIFY else: # it's p2pkh pos, pubkey = decode_data_push(redeemScript, pos) if p2sh_is_segwit: source = script.pubkey_to_p2whash(pubkey) else: source = script.pubkey_to_pubkeyhash(pubkey) valid_sig = redeemScript[pos] == bitcoinlib.core.script.OP_CHECKSIGVERIFY pos += 1 if valid_sig: uniqueOffsetLength = 0 for i in range(pos+1, len(redeemScript)): if redeemScript[i] == bitcoinlib.core.script.OP_DROP: uniqueOffsetLength = i-pos-1 break redeem_script_is_valid = redeemScript[pos + 1 + uniqueOffsetLength] == bitcoinlib.core.script.OP_DROP and \ redeemScript[pos + 2 + uniqueOffsetLength] == bitcoinlib.core.script.OP_DEPTH and \ redeemScript[pos + 3 + uniqueOffsetLength] == 0 and \ redeemScript[pos + 4 + uniqueOffsetLength] == bitcoinlib.core.script.OP_EQUAL except Exception as e: pass #traceback.print_exc() return pubkey, source, 
redeem_script_is_valid, found_data def make_p2sh_encoding_redeemscript(datachunk, n, pubKey=None, multisig_pubkeys=None, multisig_pubkeys_required=None): _logger = logger.getChild('p2sh_encoding') assert len(datachunk) <= bitcoinlib.core.script.MAX_SCRIPT_ELEMENT_SIZE dataDropScript = [datachunk, bitcoinlib.core.script.OP_DROP] # just drop the data chunk cleanupScript = [n, bitcoinlib.core.script.OP_DROP, bitcoinlib.core.script.OP_DEPTH, 0, bitcoinlib.core.script.OP_EQUAL] # unique offset + prevent scriptSig malleability if pubKey is not None: # a p2pkh script looks like this: {pubkey} OP_CHECKSIGVERIFY verifyOwnerScript = [pubKey, bitcoinlib.core.script.OP_CHECKSIGVERIFY] elif multisig_pubkeys_required is not None and multisig_pubkeys: # a 2-of-3 multisig looks like this: # 2 {pubkey1} {pubkey2} {pubkey3} 3 OP_CHECKMULTISIGVERIFY multisig_pubkeys_required = int(multisig_pubkeys_required) if multisig_pubkeys_required < 2 or multisig_pubkeys_required > 15: raise exceptions.TransactionError('invalid multisig pubkeys value') verifyOwnerScript = [multisig_pubkeys_required] for multisig_pubkey in multisig_pubkeys: verifyOwnerScript.append(multisig_pubkey) verifyOwnerScript = verifyOwnerScript + [len(multisig_pubkeys), bitcoinlib.core.script.OP_CHECKMULTISIGVERIFY] else: raise exceptions.TransactionError('Either pubKey or multisig pubKeys must be provided') #redeemScript = CScript(datachunk) + CScript(dataDropScript + verifyOwnerScript + cleanupScript) redeemScript = CScript(dataDropScript + verifyOwnerScript + cleanupScript) _logger.debug('datachunk %s' % (binascii.hexlify(datachunk))) _logger.debug('dataDropScript %s (%s)' % (repr(CScript(dataDropScript)), binascii.hexlify(CScript(dataDropScript)))) _logger.debug('verifyOwnerScript %s (%s)' % (repr(CScript(verifyOwnerScript)), binascii.hexlify(CScript(verifyOwnerScript)))) _logger.debug('entire redeemScript %s (%s)' % (repr(redeemScript), binascii.hexlify(redeemScript))) #scriptSig = CScript([]) + redeemScript # PUSH(datachunk) + redeemScript scriptSig = CScript([redeemScript]) outputScript = redeemScript.to_p2sh_scriptPubKey() _logger.debug('scriptSig %s (%s)' % (repr(scriptSig), binascii.hexlify(scriptSig))) _logger.debug('outputScript %s (%s)' % (repr(outputScript), binascii.hexlify(outputScript))) # outputScript looks like OP_HASH160 {{ hash160([redeemScript]) }} OP_EQUALVERIFY # redeemScript looks like OP_DROP {{ pubkey }} OP_CHECKSIGVERIFY {{ n }} OP_DROP OP_DEPTH 0 OP_EQUAL # scriptSig is {{ datachunk }} OP_DROP {{ pubkey }} OP_CHECKSIGVERIFY {{ n }} OP_DROP OP_DEPTH 0 OP_EQUAL return scriptSig, redeemScript, outputScript def make_standard_p2sh_multisig_script(multisig_pubkeys, multisig_pubkeys_required): # a 2-of-3 multisig looks like this: # 2 {pubkey1} {pubkey2} {pubkey3} 3 OP_CHECKMULTISIG multisig_pubkeys_required = int(multisig_pubkeys_required) multisig_script = [multisig_pubkeys_required] for multisig_pubkey in multisig_pubkeys: multisig_script.append(multisig_pubkey) multisig_script = multisig_script + [len(multisig_pubkeys), bitcoinlib.core.script.OP_CHECKMULTISIG] return multisig_script #### serializer.py """ Construct and serialize the Bitcoin transactions that are Counterparty transactions. This module contains no consensus‐critical code. 
""" import os import sys import binascii import json import hashlib import re import time import decimal import logging logger = logging.getLogger(__name__) import requests import bitcoin as bitcoinlib from bitcoin.core import Hash160 from bitcoin.core.script import CScript from bitcoin.wallet import P2PKHBitcoinAddress, P2SHBitcoinAddress import cachetools from counterpartylib.lib import config from counterpartylib.lib import exceptions from counterpartylib.lib import util from counterpartylib.lib import script from counterpartylib.lib import backend from counterpartylib.lib import arc4 from counterpartylib.lib.transaction_helper import p2sh_encoding from bitcoin.bech32 import CBech32Data # Constants OP_RETURN = b'\x6a' OP_PUSHDATA1 = b'\x4c' OP_DUP = b'\x76' OP_HASH160 = b'\xa9' OP_EQUALVERIFY = b'\x88' OP_CHECKSIG = b'\xac' OP_0 = b'\x00' OP_1 = b'\x51' OP_2 = b'\x52' OP_3 = b'\x53' OP_CHECKMULTISIG = b'\xae' OP_EQUAL = b'\x87' D = decimal.Decimal UTXO_LOCKS = None UTXO_LOCKS_PER_ADDRESS_MAXSIZE = 5000 # set higher than the max number of UTXOs we should expect to # manage in an aging cache for any one source address, at any one period def var_int (i): if i < 0xfd: return (i).to_bytes(1, byteorder='little') elif i <= 0xffff: return b'\xfd' + (i).to_bytes(2, byteorder='little') elif i <= 0xffffffff: return b'\xfe' + (i).to_bytes(4, byteorder='little') else: return b'\xff' + (i).to_bytes(8, byteorder='little') def op_push (i): if i < 0x4c: return (i).to_bytes(1, byteorder='little') # Push i bytes. elif i <= 0xff: return b'\x4c' + (i).to_bytes(1, byteorder='little') # OP_PUSHDATA1 elif i <= 0xffff: return b'\x4d' + (i).to_bytes(2, byteorder='little') # OP_PUSHDATA2 else: return b'\x4e' + (i).to_bytes(4, byteorder='little') # OP_PUSHDATA4 def get_script(address): if script.is_multisig(address): return get_multisig_script(address) elif script.is_bech32(address): return get_p2w_script(address) else: try: return get_monosig_script(address) except script.VersionByteError as e: return get_p2sh_script(address) def get_multisig_script(address): # Unpack multi‐sig address. signatures_required, pubkeys, signatures_possible = script.extract_array(address) # Required signatures. if signatures_required == 1: op_required = OP_1 elif signatures_required == 2: op_required = OP_2 elif signatures_required == 3: op_required = OP_3 else: raise script.InputError('Required signatures must be 1, 2 or 3.') # Required signatures. # Note 1-of-1 addresses are not supported (they don't go through extract_array anyway). if signatures_possible == 2: op_total = OP_2 elif signatures_possible == 3: op_total = OP_3 else: raise script.InputError('Total possible signatures must be 2 or 3.') # Construct script. tx_script = op_required # Required signatures for public_key in pubkeys: public_key = binascii.unhexlify(public_key) tx_script += op_push(len(public_key)) # Push bytes of public key tx_script += public_key # Data chunk (fake) public key tx_script += op_total # Total signatures tx_script += OP_CHECKMULTISIG # OP_CHECKMULTISIG return (tx_script, None) def get_monosig_script(address): # Construct script. pubkeyhash = script.base58_check_decode(address, config.ADDRESSVERSION) tx_script = OP_DUP # OP_DUP tx_script += OP_HASH160 # OP_HASH160 tx_script += op_push(20) # Push 0x14 bytes tx_script += pubkeyhash # pubKeyHash tx_script += OP_EQUALVERIFY # OP_EQUALVERIFY tx_script += OP_CHECKSIG # OP_CHECKSIG return (tx_script, None) def get_p2sh_script(address): # Construct script. 
scripthash = script.base58_check_decode(address, config.P2SH_ADDRESSVERSION) tx_script = OP_HASH160 tx_script += op_push(len(scripthash)) tx_script += scripthash tx_script += OP_EQUAL return (tx_script, None) def get_p2w_script(address): # Construct script. scripthash = bytes(CBech32Data(address)) if len(scripthash) == 20: # P2WPKH encoding tx_script = OP_0 tx_script += b'\x14' tx_script += scripthash witness_script = OP_HASH160 witness_script += op_push(len(scripthash)) witness_script += scripthash witness_script += OP_EQUAL return (tx_script, witness_script) elif len(scripthash) == 32: # P2WSH encoding raise Exception('P2WSH encoding not yet supported') def make_fully_valid(pubkey_start): """Take a too short data pubkey and make it look like a real pubkey. Take an obfuscated chunk of data that is two bytes too short to be a pubkey and add a sign byte to its beginning and a nonce byte to its end. Choose these bytes so that the resulting sequence of bytes is a fully valid pubkey (i.e. on the ECDSA curve). Find the correct bytes by guessing randomly until the check passes. (In parsing, these two bytes are ignored.) """ assert type(pubkey_start) == bytes assert len(pubkey_start) == 31 # One sign byte and one nonce byte required (for 33 bytes). random_bytes = hashlib.sha256(pubkey_start).digest() # Deterministically generated, for unit tests. sign = (random_bytes[0] & 0b1) + 2 # 0x02 or 0x03 nonce = initial_nonce = random_bytes[1] pubkey = b'' while not script.is_fully_valid(pubkey): # Increment nonce. nonce += 1 assert nonce != initial_nonce # Construct a possibly fully valid public key. pubkey = bytes([sign]) + pubkey_start + bytes([nonce % 256]) assert len(pubkey) == 33 return pubkey def serialise(encoding, inputs, destination_outputs, data_output=None, change_output=None, dust_return_pubkey=None): s = (1).to_bytes(4, byteorder='little') # Version use_segwit = False for i in range(len(inputs)): txin = inputs[i] spk = txin['scriptPubKey'] if spk[0:2] == '00': # Witness version 0 datalen = binascii.unhexlify(spk[2:4])[0] if datalen == 20 or datalen == 32: # 20 is for P2WPKH and 32 is for P2WSH if not(use_segwit): s = (2).to_bytes(4, byteorder='little') # Rewrite version use_segwit = True txin['is_segwit'] = True if use_segwit: s += b'\x00' # marker s += b'\x01' # flag # Number of inputs. s += var_int(int(len(inputs))) witness_txins = [] witness_data = {} # List of Inputs. for i in range(len(inputs)): txin = inputs[i] s += binascii.unhexlify(bytes(txin['txid'], 'utf-8'))[::-1] # TxOutHash s += txin['vout'].to_bytes(4, byteorder='little') # TxOutIndex tx_script = binascii.unhexlify(bytes(txin['scriptPubKey'], 'utf-8')) s += var_int(int(len(tx_script))) # Script length s += tx_script # Script s += b'\xff' * 4 # Sequence # Number of outputs. n = 0 n += len(destination_outputs) if data_output: data_array, value = data_output for data_chunk in data_array: n += 1 else: data_array = [] if change_output: n += 1 s += var_int(n) # Destination output. for destination, value in destination_outputs: s += value.to_bytes(8, byteorder='little') # Value tx_script, witness_script = get_script(destination) #if use_segwit and destination in witness_data: # Not deleteing, We will need this for P2WSH # witness_data[destination].append(witness_script) # tx_script = witness_script #if witness_script: # tx_script = witness_script s += var_int(int(len(tx_script))) # Script length s += tx_script # Data output. 
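# --- Illustrative use of make_fully_valid defined above (relies on script.is_fully_valid
# from this module's imports). A 31-byte obfuscated data chunk is wrapped into a 33-byte
# string that parses as a real compressed pubkey: byte 0 is a 0x02/0x03 sign byte and
# byte 32 is a nonce found by trial, both of which are ignored when the data is later
# decoded, so the embedded chunk survives the round trip.
_data_chunk = bytes(range(31))                 # any 31 bytes of (encrypted) payload
_fake_pubkey = make_fully_valid(_data_chunk)
assert len(_fake_pubkey) == 33
assert _fake_pubkey[0] in (0x02, 0x03)         # valid compressed-pubkey sign byte
assert _fake_pubkey[1:32] == _data_chunk       # the payload itself is untouched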
for data_chunk in data_array: data_array, value = data_output s += value.to_bytes(8, byteorder='little') # Value data_chunk = config.PREFIX + data_chunk # Initialise encryption key (once per output). assert isinstance(inputs[0]['txid'], str) key = arc4.init_arc4(binascii.unhexlify(inputs[0]['txid'])) # Arbitrary, easy‐to‐find, unique key. if encoding == 'multisig': assert dust_return_pubkey # Get data (fake) public key. pad_length = (33 * 2) - 1 - 2 - 2 - len(data_chunk) assert pad_length >= 0 data_chunk = bytes([len(data_chunk)]) + data_chunk + (pad_length * b'\x00') data_chunk = key.encrypt(data_chunk) data_pubkey_1 = make_fully_valid(data_chunk[:31]) data_pubkey_2 = make_fully_valid(data_chunk[31:]) # Construct script. tx_script = OP_1 # OP_1 tx_script += op_push(33) # Push bytes of data chunk (fake) public key (1/2) tx_script += data_pubkey_1 # (Fake) public key (1/2) tx_script += op_push(33) # Push bytes of data chunk (fake) public key (2/2) tx_script += data_pubkey_2 # (Fake) public key (2/2) tx_script += op_push(len(dust_return_pubkey)) # Push bytes of source public key tx_script += dust_return_pubkey # Source public key tx_script += OP_3 # OP_3 tx_script += OP_CHECKMULTISIG # OP_CHECKMULTISIG elif encoding == 'opreturn': data_chunk = key.encrypt(data_chunk) tx_script = OP_RETURN # OP_RETURN tx_script += op_push(len(data_chunk)) # Push bytes of data chunk (NOTE: OP_SMALLDATA?) tx_script += data_chunk # Data elif encoding == 'pubkeyhash': pad_length = 20 - 1 - len(data_chunk) assert pad_length >= 0 data_chunk = bytes([len(data_chunk)]) + data_chunk + (pad_length * b'\x00') data_chunk = key.encrypt(data_chunk) # Construct script. tx_script = OP_DUP # OP_DUP tx_script += OP_HASH160 # OP_HASH160 tx_script += op_push(20) # Push 0x14 bytes tx_script += data_chunk # (Fake) pubKeyHash tx_script += OP_EQUALVERIFY # OP_EQUALVERIFY tx_script += OP_CHECKSIG # OP_CHECKSIG else: raise exceptions.TransactionError('Unknown encoding‐scheme.') s += var_int(int(len(tx_script))) # Script length s += tx_script # Change output. if change_output: change_address, change_value = change_output s += change_value.to_bytes(8, byteorder='little') # Value tx_script, witness_script = get_script(change_address) #print("Change address!", change_address, "\n", witness_data, "\n", tx_script, "\n", witness_script) #if witness_script: #use_segwit and change_address in witness_data: # if not(change_address in witness_data): # witness_data[change_address] = [] # witness_data[change_address].append(witness_script) # tx_script = witness_script # use_segwit = True s += var_int(int(len(tx_script))) # Script length s += tx_script if use_segwit: for i in range(len(inputs)): txin = inputs[i] if txin['is_segwit']: s += b'\x02' s += b'\x00\x00' else: s += b'\x00' s += (0).to_bytes(4, byteorder='little') # LockTime return s def serialise_p2sh_pretx(inputs, source, source_value, data_output, change_output=None, pubkey=None, multisig_pubkeys=None, multisig_pubkeys_required=None): assert data_output # we don't do this unless there's data data_array, data_value = data_output s = (1).to_bytes(4, byteorder='little') # Version # Number of inputs. s += var_int(int(len(inputs))) # List of Inputs. 
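# --- Illustrative sketch of the ARC4 obfuscation applied to each data chunk above.
# arc4.init_arc4(txid_bytes).encrypt(chunk) is assumed to be plain RC4 keyed on the
# first input's txid; because RC4 is a symmetric stream cipher, applying the same
# keystream again recovers the original prefix-tagged data chunk during parsing.
# The key and payload below are made up (the real prefix value comes from config.PREFIX).
def _rc4(key, data):
    S = list(range(256))                      # key-scheduling algorithm (KSA)
    j = 0
    for i in range(256):
        j = (j + S[i] + key[i % len(key)]) % 256
        S[i], S[j] = S[j], S[i]
    out, i, j = bytearray(), 0, 0             # pseudo-random generation algorithm (PRGA)
    for byte in data:
        i = (i + 1) % 256
        j = (j + S[i]) % 256
        S[i], S[j] = S[j], S[i]
        out.append(byte ^ S[(S[i] + S[j]) % 256])
    return bytes(out)

_key = bytes.fromhex('aa' * 32)               # stand-in for the first input's txid bytes
_plain = b'PREFIX' + b'example payload'       # prefix-tagged data chunk
_obfuscated = _rc4(_key, _plain)
assert _rc4(_key, _obfuscated) == _plain      # the same keystream decrypts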
for i in range(len(inputs)): txin = inputs[i] s += binascii.unhexlify(bytes(txin['txid'], 'utf-8'))[::-1] # TxOutHash s += txin['vout'].to_bytes(4, byteorder='little') # TxOutIndex tx_script = binascii.unhexlify(bytes(txin['scriptPubKey'], 'utf-8')) s += var_int(int(len(tx_script))) # Script length s += tx_script # Script s += b'\xff' * 4 # Sequence # Number of outputs. n = len(data_array) if change_output: n += 1 # encode number of outputs s += var_int(n) # P2SH for data encodeded inputs for n, data_chunk in enumerate(data_array): data_chunk = config.PREFIX + data_chunk # prefix the data_chunk # get the scripts scriptSig, redeemScript, outputScript = p2sh_encoding.make_p2sh_encoding_redeemscript(data_chunk, n, pubkey, multisig_pubkeys, multisig_pubkeys_required) #if data_value is an array, then every output fee is specified in it if type(data_value) == list: s += data_value[n].to_bytes(8, byteorder='little') # Value else: s += data_value.to_bytes(8, byteorder='little') # Value s += var_int(int(len(outputScript))) # Script length s += outputScript # Script # Change output. if change_output: change_address, change_value = change_output tx_script, witness_script = get_script(change_address) s += change_value.to_bytes(8, byteorder='little') # Value s += var_int(int(len(tx_script))) # Script length s += tx_script # Script s += (0).to_bytes(4, byteorder='little') # LockTime return s def serialise_p2sh_datatx(txid, source, source_input, destination_outputs, data_output, pubkey=None, multisig_pubkeys=None, multisig_pubkeys_required=None): assert data_output # we don't do this unless there's data txhash = bitcoinlib.core.lx(bitcoinlib.core.b2x(txid)) # reverse txId data_array, value = data_output # version s = (1).to_bytes(4, byteorder='little') # number of inputs is the length of data_array (+1 if a source_input exists) number_of_inputs = len(data_array) if source_input is not None: number_of_inputs += 1 s += var_int(number_of_inputs) # Handle a source input here for a P2SH source if source_input is not None: s += binascii.unhexlify(bytes(source_input['txid'], 'utf-8'))[::-1] # TxOutHash s += source_input['vout'].to_bytes(4, byteorder='little') # TxOutIndex # since pubkey is not returned from indexd, add it from bitcoind source_inputs = backend.ensure_script_pub_key_for_inputs([source_input]) source_input = source_inputs[0] tx_script = binascii.unhexlify(bytes(source_input['scriptPubKey'], 'utf-8')) s += var_int(int(len(tx_script))) # Script length s += tx_script # Script s += b'\xff' * 4 # Sequence # list of inputs for n, data_chunk in enumerate(data_array): data_chunk = config.PREFIX + data_chunk # prefix the data_chunk # get the scripts scriptSig, redeemScript, outputScript = p2sh_encoding.make_p2sh_encoding_redeemscript(data_chunk, n, pubkey, multisig_pubkeys, multisig_pubkeys_required) #substituteScript = scriptSig + outputScript s += txhash # TxOutHash s += (n).to_bytes(4, byteorder='little') # TxOutIndex (assumes 0-based) #s += var_int(len(substituteScript)) # Script length #s += substituteScript # Script s += var_int(len(scriptSig))# + len(outputScript)) # Script length s += scriptSig # Script #s += outputScript # Script s += b'\xff' * 4 # Sequence # number of outputs, always 1 for the opreturn n = 1 n += len(destination_outputs) # encode output length s += var_int(n) # destination outputs for destination, value in destination_outputs: tx_script, witness_script = get_script(destination) s += value.to_bytes(8, byteorder='little') # Value s += var_int(int(len(tx_script))) # Script length s 
+= tx_script # Script # opreturn to signal P2SH encoding key = arc4.init_arc4(txid) data_chunk = config.PREFIX + b'P2SH' data_chunk = key.encrypt(data_chunk) tx_script = OP_RETURN # OP_RETURN tx_script += op_push(len(data_chunk)) # Push bytes of data chunk tx_script += data_chunk # Data # add opreturn s += (0).to_bytes(8, byteorder='little') # Value s += var_int(int(len(tx_script))) # Script length s += tx_script # Script s += (0).to_bytes(4, byteorder='little') # LockTime return s #### __init__.py