11-13 5 views
通过“class BTC”获取到btc实时交易信息:市值,价格,总量,交易量,挖矿难度,主网拥堵情况;
通过“class BTCHolderCrawler”获取btc钱包的实时资金量
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 |
import urllib.request
import urllib.error
import ssl
import logging

from bs4 import BeautifulSoup

logger = logging.getLogger(__name__)


class BTC:
    """Fetch real-time BTC network statistics from the blockchain.info API."""

    def __init__(self):
        self.user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'
        # NOTE(review): unverified SSL context disables certificate checks —
        # kept from the original; consider ssl.create_default_context().
        self.context = ssl._create_unverified_context()
        self.headers = {'User-Agent': self.user_agent, 'Content-Type': 'application/json'}
        self.stories = []
        self.url = 'https://api.blockchain.info/stats'

    def get_btc_stats(self):
        """Return the raw stats JSON string from blockchain.info, or None on failure."""
        try:
            request = urllib.request.Request(self.url, headers=self.headers)
            # timeout added for consistency with BTCHolderCrawler.get_page;
            # the original could block indefinitely.
            response = urllib.request.urlopen(request, context=self.context, timeout=40)
            return response.read().decode('utf-8', 'ignore')
        except urllib.error.URLError as e:
            if hasattr(e, "reason"):
                # Fix: original passed e.reason with no %s placeholder,
                # which breaks the log message formatting.
                logger.error(u"连接blockchain api失败,错误原因: %s", e.reason)
            return None


class BTCHolderCrawler:
    """Crawl the bitinfocharts.com top-100 richest bitcoin addresses page."""

    def __init__(self):
        self.pageIndex = 1
        self.user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'
        self.context = ssl._create_unverified_context()
        self.headers = {'User-Agent': self.user_agent}
        self.stories = []
        self.enable = False
        self.url = 'https://bitinfocharts.com/top-100-richest-bitcoin-addresses.html'

    def get_page(self):
        """Return the target page's HTML as text, or None on failure."""
        try:
            request = urllib.request.Request(self.url, headers=self.headers)
            response = urllib.request.urlopen(request, context=self.context, timeout=40)
            page = response.read().decode('utf-8', 'ignore')
            return page
        except urllib.error.URLError as e:
            if hasattr(e, "reason"):
                # Fix: %s placeholder added so e.reason is actually rendered.
                logger.error(u"连接bitinfocharts.com失败,错误原因: %s", e.reason)
            return None

    @staticmethod
    def get_page_items(page):
        """Parse *page* and return the combined <tr> rows of the two holder
        tables (first table class 'table table-striped bb abtb', second
        'table table-striped bb'), or None when the page is empty/missing."""
        if not page:
            logger.error("页面加载失败...")
            return None
        soup = BeautifulSoup(page, 'html.parser')
        tb1 = soup.findAll(name='table', attrs={'class': 'table table-striped bb abtb'})
        trs1 = tb1[0].find_all('tr')
        tb2 = soup.findAll(name='table', attrs={'class': 'table table-striped bb'})
        trs2 = tb2[0].find_all('tr')
        trs = trs1 + trs2
        return trs
返回结果1
1 |
{"timestamp":1.542092693E12,"market_price_usd":6380.03,"hash_rate":4.714230930157459E10,"total_fees_btc":1843221810,"n_btc_mined":165000000000,"n_tx":287100,"n_blocks_mined":132,"minutes_between_blocks":10.2137,"totalbc":1737363750000000,"n_blocks_total":549891,"estimated_transaction_volume_usd":7.100063938710359E8,"blocks_size":142778902,"miners_revenue_usd":1.064464760444454E7,"nextretarget":550367,"difficulty":7184404942701,"estimated_btc_sent":11128574534462,"miners_revenue_btc":1668,"total_btc_sent":84179061071156,"trade_volume_btc":36429.74,"trade_volume_usd":2.3242283409219998E8} |
返回结果2
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 |
Balance = re.findall(partent, trs[i].find_all('td')[2].get_text().strip()) Balance_btc = Balance[0][0] Balance_usd = Balance[0][1] holder_list['Rank'] = int(trs[i].find_all('td')[0].get_text().strip()) holder_list['Address'] = trs[i].find_all('td')[1].get_text().strip().replace('wallet', ' ', 1) holder_list['Balance'] = trs[i].find_all('td')[2].get_text().strip() holder_list['Balance_btc'] = float(Balance_btc.replace(',', '')) holder_list['Balance_usd'] = float(Balance_usd.replace(',', '')) holder_list['Percentage'] = float(trs[i].find_all('td')[3].get_text().replace('%', '')) holder_list['First_in'] = trs[i].find_all('td')[4].get_text().strip() holder_list['Last_in'] = trs[i].find_all('td')[5].get_text().strip() ins = trs[i].find_all('td')[6].get_text().strip().replace(',', '') if ins: holder_list['Num_ins'] = int(ins) else: holder_list['Num_ins'] = 0 holder_list['First_out'] = trs[i].find_all('td')[7].get_text().strip() holder_list['Last_out'] = trs[i].find_all('td')[8].get_text().strip() outs = trs[i].find_all('td')[9].get_text().strip().replace(',', '') if outs: holder_list['Num_outs'] = int(outs) else: holder_list['Num_outs'] = 0 holder_list['@timestamp'] = datetime.now().isoformat() a = copy.deepcopy(holder_list) holder.append(a) |
如果想赏钱,可以用微信扫描下面的二维码,一来能刺激我写博客的欲望,二来也好支付云主机的费用;另外再次标注博客原地址 itnotebooks.com 感谢!
