v1.3.1 (#127)
cryptosharks131 authored Aug 29, 2022
1 parent c63c065 · commit ec90205
Showing 2 changed files with 25 additions and 23 deletions.
gui/views.py — 30 changes: 15 additions & 15 deletions
@@ -70,7 +70,7 @@ def home(request):
funding_txid = target_resp[i].channel.channel_point.split(':')[0]
output_index = target_resp[i].channel.channel_point.split(':')[1]
updated = pending_changes.filter(funding_txid=funding_txid,output_index=output_index).exists()
item['alias'] = peers.filter(pubkey=target_resp[i].channel.remote_node_pub)[0].alias if peers.filter(pubkey=target_resp[i].channel.remote_node_pub).exists() else None
item['alias'] = peers.filter(pubkey=target_resp[i].channel.remote_node_pub)[0].alias if peers.filter(pubkey=target_resp[i].channel.remote_node_pub).exists() else ''
item['remote_node_pub'] = target_resp[i].channel.remote_node_pub
item['channel_point'] = target_resp[i].channel.channel_point
item['funding_txid'] = funding_txid
@@ -205,10 +205,10 @@ def home(request):
onchain_txs = Onchain.objects.all()
onchain_costs_7day = 0 if onchain_txs.filter(time_stamp__gte=filter_7day).count() == 0 else onchain_txs.filter(time_stamp__gte=filter_7day).aggregate(Sum('fee'))['fee__sum']
onchain_costs_1day = 0 if onchain_txs.filter(time_stamp__gte=filter_1day).count() == 0 else onchain_txs.filter(time_stamp__gte=filter_1day).aggregate(Sum('fee'))['fee__sum']
closures_7day = Closures.objects.filter(close_height__gte=(node_info.block_height - 1008))
closures_1day = Closures.objects.filter(close_height__gte=(node_info.block_height - 144))
close_fees_7day = channels.filter(chan_id__in=closures_7day.values('chan_id')).aggregate(Sum('closing_costs'))['closing_costs__sum'] if closures_7day.exists() else 0
close_fees_1day = channels.filter(chan_id__in=closures_1day.values('chan_id')).aggregate(Sum('closing_costs'))['closing_costs__sum'] if closures_1day.exists() else 0
closures_7day = channels.filter(chan_id__in=Closures.objects.filter(close_height__gte=(node_info.block_height - 1008)).values('chan_id'))
closures_1day = channels.filter(chan_id__in=Closures.objects.filter(close_height__gte=(node_info.block_height - 144)).values('chan_id'))
close_fees_7day = closures_7day.aggregate(Sum('closing_costs'))['closing_costs__sum'] if closures_7day.exists() else 0
close_fees_1day = closures_1day.aggregate(Sum('closing_costs'))['closing_costs__sum'] if closures_1day.exists() else 0
onchain_costs_7day += close_fees_7day
onchain_costs_1day += close_fees_1day
total_costs_7day = total_7day_fees + onchain_costs_7day
@@ -756,11 +756,11 @@ def income(request):
onchain_txs_30day = onchain_txs.filter(time_stamp__gte=filter_30day)
onchain_txs_7day = onchain_txs.filter(time_stamp__gte=filter_7day)
onchain_txs_1day = onchain_txs.filter(time_stamp__gte=filter_1day)
closures = Closures.objects.all()
closures_90day = closures.filter(close_height__gte=(node_info.block_height - 12960))
closures_30day = closures.filter(close_height__gte=(node_info.block_height - 4320))
closures_7day = closures.filter(close_height__gte=(node_info.block_height - 1008))
closures_1day = closures.filter(close_height__gte=(node_info.block_height - 144))
closures = channels.filter(chan_id__in=Closures.objects.all().values('chan_id'))
closures_90day = channels.filter(chan_id__in=Closures.objects.filter(close_height__gte=(node_info.block_height - 12960)).values('chan_id'))
closures_30day = channels.filter(chan_id__in=Closures.objects.filter(close_height__gte=(node_info.block_height - 4320)).values('chan_id'))
closures_7day = channels.filter(chan_id__in=Closures.objects.filter(close_height__gte=(node_info.block_height - 1008)).values('chan_id'))
closures_1day = channels.filter(chan_id__in=Closures.objects.filter(close_height__gte=(node_info.block_height - 144)).values('chan_id'))
forwards = Forwards.objects.all()
forwards_90day = forwards.filter(forward_date__gte=filter_90day)
forwards_30day = forwards.filter(forward_date__gte=filter_30day)
@@ -806,11 +806,11 @@ def income(request):
onchain_costs_30day = 0 if onchain_txs_30day.count() == 0 else onchain_txs_30day.aggregate(Sum('fee'))['fee__sum']
onchain_costs_7day = 0 if onchain_txs_7day.count() == 0 else onchain_txs_7day.aggregate(Sum('fee'))['fee__sum']
onchain_costs_1day = 0 if onchain_txs_1day.count() == 0 else onchain_txs_1day.aggregate(Sum('fee'))['fee__sum']
close_fees = channels.filter(chan_id__in=closures.values('chan_id')).aggregate(Sum('closing_costs'))['closing_costs__sum'] if closures.exists() else 0
close_fees_90day = channels.filter(chan_id__in=closures_90day.values('chan_id')).aggregate(Sum('closing_costs'))['closing_costs__sum'] if closures_90day.exists() else 0
close_fees_30day = channels.filter(chan_id__in=closures_30day.values('chan_id')).aggregate(Sum('closing_costs'))['closing_costs__sum'] if closures_30day.exists() else 0
close_fees_7day = channels.filter(chan_id__in=closures_7day.values('chan_id')).aggregate(Sum('closing_costs'))['closing_costs__sum'] if closures_7day.exists() else 0
close_fees_1day = channels.filter(chan_id__in=closures_1day.values('chan_id')).aggregate(Sum('closing_costs'))['closing_costs__sum'] if closures_1day.exists() else 0
close_fees = closures.aggregate(Sum('closing_costs'))['closing_costs__sum'] if closures.exists() else 0
close_fees_90day = closures_90day.aggregate(Sum('closing_costs'))['closing_costs__sum'] if closures_90day.exists() else 0
close_fees_30day = closures_30day.aggregate(Sum('closing_costs'))['closing_costs__sum'] if closures_30day.exists() else 0
close_fees_7day = closures_7day.aggregate(Sum('closing_costs'))['closing_costs__sum'] if closures_7day.exists() else 0
close_fees_1day = closures_1day.aggregate(Sum('closing_costs'))['closing_costs__sum'] if closures_1day.exists() else 0
onchain_costs += close_fees
onchain_costs_90day += close_fees_90day
onchain_costs_30day += close_fees_30day
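The views.py hunks above rework how closing costs are summed: instead of filtering Closures by block height and then re-filtering channels inside each aggregate call, the 1-day (144 blocks) and 7-day (1008 blocks) closure sets are built directly as channel querysets keyed on chan_id, and the closing_costs aggregation runs once per window. A minimal sketch of the pattern, assuming simplified Django models (a Channels model with chan_id and closing_costs, a Closures model with chan_id and close_height) rather than LNDg's exact schema:

from django.db.models import Sum

def closing_costs_since(channels, closures, current_height, blocks_back):
    # Channels closed within the last `blocks_back` blocks
    # (144 blocks is roughly one day, 1008 roughly seven days).
    recent = closures.filter(close_height__gte=current_height - blocks_back)
    closed = channels.filter(chan_id__in=recent.values('chan_id'))
    if not closed.exists():
        return 0
    return closed.aggregate(Sum('closing_costs'))['closing_costs__sum'] or 0

# Hypothetical call site mirroring the diff:
# close_fees_1day = closing_costs_since(channels, Closures.objects.all(), node_info.block_height, 144)
# close_fees_7day = closing_costs_since(channels, Closures.objects.all(), node_info.block_height, 1008)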
jobs.py — 18 changes: 10 additions & 8 deletions
@@ -405,31 +405,33 @@ def reconnect_peers(stub):
peers = Peers.objects.all()
for inactive_peer in inactive_peers:
if peers.filter(pubkey=inactive_peer).exists():
peer = peers.filter(pubkey=inactive_peer)[0]
peer = peers.filter(pubkey=inactive_peer)[0]
if peer.last_reconnected == None or (int((datetime.now() - peer.last_reconnected).total_seconds() / 60) > 2):
print (f"{datetime.now().strftime('%c')} : Reconnecting {peer.alias=} {peer.pubkey=} {peer.last_reconnected=}")
if peer.connected == True:
print('Inactive channel is still connected to peer, disconnecting peer...')
print (f"{datetime.now().strftime('%c')} : ... Inactive channel is still connected to peer, disconnecting peer. {peer.alias=} {inactive_peer=}")
stub.DisconnectPeer(ln.DisconnectPeerRequest(pub_key=inactive_peer))
peer.connected = False
peer.save()
print('Attempting connection to:', inactive_peer)
try:
node = stub.GetNodeInfo(ln.NodeInfoRequest(pub_key=inactive_peer, include_channels=False)).node
host = node.addresses[0].addr
except:
print('Unable to find node info on graph, using last known value')
print (f"{datetime.now().strftime('%c')} : ... Unable to find node info on graph, using last known value {peer.alias=} {peer.pubkey=} {peer.address=}")
host = peer.address
address = ln.LightningAddress(pubkey=inactive_peer, host=host)
print (f"{datetime.now().strftime('%c')} : ... Attempting connection to {peer.alias=} {inactive_peer=} {host=}")
try:
stub.ConnectPeer(request = ln.ConnectPeerRequest(addr=address, perm=True, timeout=5))
peer.last_reconnected = datetime.now()
peer.save()
response = stub.ConnectPeer(request = ln.ConnectPeerRequest(addr=address, perm=False, timeout=5))
print (f"{datetime.now().strftime('%c')} : .... Status {peer.alias=} {inactive_peer=} {response=}")
except Exception as e:
error = str(e)
details_index = error.find('details =') + 11
debug_error_index = error.find('debug_error_string =') - 3
error_msg = error[details_index:debug_error_index]
print (f"{datetime.now().strftime('%c')} : Error reconnecting {inactive_peer=} {error_msg=}")
print (f"{datetime.now().strftime('%c')} : .... Error reconnecting {peer.alias} {inactive_peer=} {error_msg=}")
peer.last_reconnected = datetime.now()
peer.save()

def clean_payments(stub):
if LocalSettings.objects.filter(key='LND-CleanPayments').exists():
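The jobs.py hunk throttles reconnect attempts (a peer is only retried when last_reconnected is unset or more than two minutes old), switches ConnectPeer to perm=False, stamps last_reconnected after every attempt, and routes progress through timestamped log lines. The except branch pulls the human-readable message out of the stringified gRPC error. A minimal sketch of that parsing step, using a hypothetical error string for illustration (not an actual lnd message):

def extract_grpc_details(exc):
    # Slice the text between 'details = "' and the 'debug_error_string ='
    # marker out of the stringified gRPC exception; the +11 / -3 offsets skip
    # the surrounding quote and separator characters.
    error = str(exc)
    details_index = error.find('details =') + 11
    debug_error_index = error.find('debug_error_string =') - 3
    return error[details_index:debug_error_index]

# Hypothetical example:
sample = Exception('details = "dial tcp: i/o timeout", debug_error_string = "{...}"')
print(extract_grpc_details(sample))  # -> dial tcp: i/o timeout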
