Merge pull request #9551 from f321x/swap_nostr_event_type

Change nostr swap event type, add PoW, rebroadcast events
ThomasV
2025-02-12 10:31:54 +01:00
committed by GitHub
4 changed files with 161 additions and 21 deletions


@@ -1246,7 +1246,8 @@ class ElectrumWindow(QMainWindow, MessageBoxMixin, Logger, QtEventListener):
def descr(x):
last_seen = util.age(x['timestamp'])
return f"pubkey={x['pubkey'][0:10]}, fee={x['percentage_fee']}% + {x['reverse_mining_fee']} sats"
server_keys = [(x['pubkey'], descr(x)) for x in recent_offers]
pow_sorted_offers = sorted(recent_offers, key=lambda x: x['pow_bits'], reverse=True)
server_keys = [(x['pubkey'], descr(x)) for x in pow_sorted_offers]
msg = '\n'.join([
_("Please choose a server from this list."),
_("Note that fees may be updated frequently.")


@@ -1197,11 +1197,15 @@ Warning: setting this to too low will result in lots of payment failures."""),
SWAPSERVER_FEE_MILLIONTHS = ConfigVar('swapserver_fee_millionths', default=5000, type_=int)
TEST_SWAPSERVER_REFUND = ConfigVar('test_swapserver_refund', default=False, type_=bool)
SWAPSERVER_NPUB = ConfigVar('swapserver_npub', default=None, type_=str)
SWAPSERVER_ANN_POW_NONCE = ConfigVar('swapserver_ann_pow_nonce', default=0, type_=int)
SWAPSERVER_POW_TARGET = ConfigVar('swapserver_pow_target', default=30, type_=int)
# nostr
NOSTR_RELAYS = ConfigVar(
'nostr_relays',
default='wss://nos.lol,wss://relay.damus.io,wss://brb.io,wss://nostr.mom',
default='wss://nos.lol,wss://relay.damus.io,wss://brb.io,wss://nostr.mom,'
'wss://relay.primal.net,wss://ftp.halifax.rwth-aachen.de/nostr,'
'wss://eu.purplerelay.com,wss://nostr.einundzwanzig.space',
type_=str,
short_desc=lambda: _("Nostr relays"),
long_desc=lambda: ' '.join([
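A minimal usage sketch of the new settings, assuming a SimpleConfig instance named config (the attribute-access style matches how the swap code reads these values elsewhere in the diff; the nonce value is illustrative):

relays = config.NOSTR_RELAYS.split(',')      # comma-separated relay URL string -> list
target = config.SWAPSERVER_POW_TARGET        # default: 30 leading zero bits
config.SWAPSERVER_ANN_POW_NONCE = 123        # cache a freshly mined nonce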


@@ -1,6 +1,7 @@
import asyncio
import json
import os
import ssl
from typing import TYPE_CHECKING, Optional, Dict, Union, Sequence, Tuple, Iterable
from decimal import Decimal
import math
@@ -13,6 +14,7 @@ import electrum_ecc as ecc
from electrum_ecc import ECPrivkey
import electrum_aionostr as aionostr
from electrum_aionostr.event import Event
from electrum_aionostr.util import to_nip19
from collections import defaultdict
@@ -24,7 +26,8 @@ from .bitcoin import (script_to_p2wsh, opcodes,
construct_witness)
from .transaction import PartialTxInput, PartialTxOutput, PartialTransaction, Transaction, TxInput, TxOutpoint
from .transaction import script_GetOp, match_script_against_template, OPPushDataGeneric, OPPushDataPubkey
from .util import log_exceptions, ignore_exceptions, BelowDustLimit, OldTaskGroup, age
from .util import (log_exceptions, ignore_exceptions, BelowDustLimit, OldTaskGroup, age, ca_path,
gen_nostr_ann_pow, get_nostr_ann_pow_amount)
from .lnutil import REDEEM_AFTER_DOUBLE_SPENT_DELAY
from .bitcoin import dust_threshold, DummyAddress
from .logging import Logger
@@ -231,6 +234,7 @@ class SwapManager(Logger):
@log_exceptions
async def run_nostr_server(self):
await self.set_nostr_proof_of_work()
with NostrTransport(self.config, self, self.lnworker.nostr_keypair) as transport:
await transport.is_connected.wait()
self.logger.info(f'nostr is connected')
@@ -238,7 +242,7 @@ class SwapManager(Logger):
# todo: publish everytime fees have changed
self.server_update_pairs()
await transport.publish_offer(self)
await asyncio.sleep(600)
await asyncio.sleep(transport.OFFER_UPDATE_INTERVAL_SEC)
@log_exceptions
async def main_loop(self):
@@ -265,6 +269,23 @@ class SwapManager(Logger):
keypair = self.lnworker.nostr_keypair if self.is_server else generate_random_keypair()
return NostrTransport(self.config, self, keypair)
async def set_nostr_proof_of_work(self) -> None:
current_pow = get_nostr_ann_pow_amount(
self.lnworker.nostr_keypair.pubkey[1:],
self.config.SWAPSERVER_ANN_POW_NONCE
)
if current_pow >= self.config.SWAPSERVER_POW_TARGET:
self.logger.debug(f"Reusing existing PoW nonce for nostr announcement.")
return
self.logger.info(f"Generating PoW for nostr announcement. Target: {self.config.SWAPSERVER_POW_TARGET}")
nonce, pow_amount = await gen_nostr_ann_pow(
self.lnworker.nostr_keypair.pubkey[1:], # pubkey without prefix
self.config.SWAPSERVER_POW_TARGET,
)
self.logger.debug(f"Found {pow_amount} bits of work for Nostr announcement.")
self.config.SWAPSERVER_ANN_POW_NONCE = nonce
async def pay_invoice(self, key):
self.logger.info(f'trying to pay invoice {key}')
self.invoices_to_pay[key] = 1000000000000 # lock
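As a rough worked example (assuming uniformly distributed sha256 output), meeting the default target of 30 leading zero bits takes about 2**30 hash attempts on average; caching the nonce in the config makes this a one-time cost per server pubkey:

target_bits = 30
expected_attempts = 2 ** target_bits    # ~1.07e9 sha256 evaluations on average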
@@ -1299,9 +1320,9 @@ class NostrTransport(Logger):
# (todo: we should use onion messages for that)
NOSTR_DM = 4
NOSTR_SWAP_OFFER = 10943
NOSTR_EVENT_TIMEOUT = 60*60*24
NOSTR_EVENT_VERSION = 1
USER_STATUS_NIP38 = 30315
NOSTR_EVENT_VERSION = 2
OFFER_UPDATE_INTERVAL_SEC = 60 * 10
def __init__(self, config, sm, keypair):
Logger.__init__(self)
@@ -1313,7 +1334,8 @@ class NostrTransport(Logger):
self.nostr_private_key = to_nip19('nsec', keypair.privkey.hex())
self.nostr_pubkey = keypair.pubkey.hex()[2:]
self.dm_replies = defaultdict(asyncio.Future) # type: Dict[bytes, asyncio.Future]
self.relay_manager = aionostr.Manager(self.relays, private_key=self.nostr_private_key)
ssl_context = ssl.create_default_context(purpose=ssl.Purpose.SERVER_AUTH, cafile=ca_path)
self.relay_manager = aionostr.Manager(self.relays, private_key=self.nostr_private_key, log=self.logger, ssl_context=ssl_context)
self.taskgroup = OldTaskGroup()
self.is_connected = asyncio.Event()
self.server_relays = None
@@ -1384,9 +1406,6 @@ class NostrTransport(Logger):
async def publish_offer(self, sm):
assert self.sm.is_server
offer = {
"type": "electrum-swap",
"version": self.NOSTR_EVENT_VERSION,
'network': constants.net.NET_NAME,
'percentage_fee': sm.percentage,
'normal_mining_fee': sm.normal_fee,
'reverse_mining_fee': sm.lockup_fee,
@@ -1394,13 +1413,19 @@ class NostrTransport(Logger):
'min_amount': sm._min_amount,
'max_amount': sm._max_amount,
'relays': sm.config.NOSTR_RELAYS,
'pow_nonce': hex(sm.config.SWAPSERVER_ANN_POW_NONCE),
}
self.logger.info(f'publishing swap offer..')
# the first value of a single letter tag is indexed and can be filtered for
tags = [['d', f'electrum-swapserver-{self.NOSTR_EVENT_VERSION}'],
['r', 'net:' + constants.net.NET_NAME],
['expiration', str(int(time.time() + self.OFFER_UPDATE_INTERVAL_SEC + 10))]]
event_id = await aionostr._add_event(
self.relay_manager,
kind=self.NOSTR_SWAP_OFFER,
kind=self.USER_STATUS_NIP38,
tags=tags,
content=json.dumps(offer),
private_key=self.nostr_private_key)
self.logger.info(f"published offer {event_id}")
async def send_direct_message(self, pubkey: str, relays, content: str) -> str:
event_id = await aionostr._add_event(
@@ -1422,15 +1447,22 @@ class NostrTransport(Logger):
async def receive_offers(self):
await self.is_connected.wait()
query = {"kinds": [self.NOSTR_SWAP_OFFER], "limit":10}
query = {
"kinds": [self.USER_STATUS_NIP38],
"limit":10,
"#d": [f"electrum-swapserver-{self.NOSTR_EVENT_VERSION}"],
"#r": [f"net:{constants.net.NET_NAME}"],
"since": int(time.time()) - self.OFFER_UPDATE_INTERVAL_SEC
}
async for event in self.relay_manager.get_events(query, single_event=False, only_stored=False):
try:
content = json.loads(event.content)
tags = {k: v for k, v in event.tags}
except Exception as e:
continue
if content.get('version') != self.NOSTR_EVENT_VERSION:
if tags.get('d') != f"electrum-swapserver-{self.NOSTR_EVENT_VERSION}":
continue
if content.get('network') != constants.net.NET_NAME:
if tags.get('r') != f"net:{constants.net.NET_NAME}":
continue
# check if this is the most recent event for this pubkey
pubkey = event.pubkey
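Kind 30315 is a parameterized replaceable event (NIP-38 user status), so relays keep only the newest event per pubkey and 'd' tag. A sketch of what a matching offer event roughly looks like (all values are illustrative; the network name and timestamps are assumptions):

event = {
    'kind': 30315,
    'pubkey': '<server nostr pubkey, hex>',
    'created_at': 1739350000,
    'tags': [
        ['d', 'electrum-swapserver-2'],
        ['r', 'net:mainnet'],
        ['expiration', '1739350610'],      # created_at + OFFER_UPDATE_INTERVAL_SEC + 10
    ],
    'content': '{"percentage_fee": 0.5, "pow_nonce": "0x1a2b", ...}',  # JSON offer
}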
@@ -1438,24 +1470,44 @@ class NostrTransport(Logger):
if event.created_at <= ts:
#print('skipping old event', pubkey[0:10], event.id)
continue
try:
pow_bits = get_nostr_ann_pow_amount(
bytes.fromhex(pubkey),
int(content.get('pow_nonce', "0"), 16)
)
except ValueError:
continue
if pow_bits < self.config.SWAPSERVER_POW_TARGET:
self.logger.debug(f"too low pow: {pubkey}: pow: {pow_bits} nonce: {content.get('pow_nonce', 0)}")
continue
content['pow_bits'] = pow_bits
content['pubkey'] = pubkey
content['timestamp'] = event.created_at
self.offers[pubkey] = content
# mirror event to other relays
#await man.add_event(event, check_response=False)
server_relays = content['relays'].split(',') if 'relays' in content else []
await self.taskgroup.spawn(self.rebroadcast_event(event, server_relays))
async def get_pairs(self):
if self.config.SWAPSERVER_NPUB is None:
return
query = {"kinds": [self.NOSTR_SWAP_OFFER], "authors": [self.config.SWAPSERVER_NPUB], "limit":1}
query = {
"kinds": [self.USER_STATUS_NIP38],
"authors": [self.config.SWAPSERVER_NPUB],
"#d": [f"electrum-swapserver-{self.NOSTR_EVENT_VERSION}"],
"#r": [f"net:{constants.net.NET_NAME}"],
"since": int(time.time()) - self.OFFER_UPDATE_INTERVAL_SEC,
"limit": 1
}
async for event in self.relay_manager.get_events(query, single_event=True, only_stored=False):
try:
content = json.loads(event.content)
except Exception as e:
tags = {k: v for k, v in event.tags}
except Exception:
continue
if content.get('version') != self.NOSTR_EVENT_VERSION:
if tags.get('d') != f"electrum-swapserver-{self.NOSTR_EVENT_VERSION}":
continue
if content.get('network') != constants.net.NET_NAME:
if tags.get('r') != f"net:{constants.net.NET_NAME}":
continue
# check if this is the most recent event for this pubkey
pubkey = event.pubkey
@@ -1466,6 +1518,21 @@ class NostrTransport(Logger):
self.sm.update_pairs(pairs)
self.server_relays = content['relays'].split(',')
async def rebroadcast_event(self, event: Event, server_relays: Sequence[str]):
"""If the relays of the origin server are different from our relays we rebroadcast the
event to our relays so it gets spread more widely."""
if not server_relays:
return
rebroadcast_relays = [relay for relay in self.relay_manager.relays if
relay.url not in server_relays]
for relay in rebroadcast_relays:
try:
res = await relay.add_event(event, check_response=True)
except Exception as e:
self.logger.debug(f"failed to rebroadcast event to {relay.url}: {e}")
continue
self.logger.debug(f"rebroadcasted event to {relay.url}: {res}")
@log_exceptions
async def check_direct_messages(self):
privkey = aionostr.key.PrivateKey(self.private_key)
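For illustration of rebroadcast_event above: only relays that the origin server does not already publish to receive the event. A minimal sketch, with relay URLs taken from the default relay list:

our_relays = ['wss://nos.lol', 'wss://relay.damus.io']
server_relays = ['wss://relay.damus.io']
rebroadcast_to = [r for r in our_relays if r not in server_relays]
# rebroadcast_to == ['wss://nos.lol']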


@@ -25,6 +25,7 @@ import concurrent.futures
import logging
import os, sys, re, json
from collections import defaultdict, OrderedDict
from concurrent.futures.process import ProcessPoolExecutor
from typing import (NamedTuple, Union, TYPE_CHECKING, Tuple, Optional, Callable, Any,
Sequence, Dict, Generic, TypeVar, List, Iterable, Set, Awaitable)
from datetime import datetime, timezone
@@ -34,6 +35,7 @@ import traceback
import urllib
import threading
import hmac
import hashlib
import stat
import locale
import asyncio
@@ -2116,3 +2118,69 @@ def truncate_text(text: str, *, max_len: Optional[int]) -> str:
return text
else:
return text[:max_len] + f"... (truncated. orig_len={len(text)})"
def nostr_pow_worker(nonce, nostr_pubk, target_bits, hash_function, hash_len_bits, shutdown):
"""Function to generate PoW for Nostr, to be spawned in a ProcessPoolExecutor."""
hash_preimage = b'electrum-' + nostr_pubk
while True:
# checking is_set() on every attempt has a lot of overhead; batching attempts in an
# inner range loop lets us poll the shutdown event cheaply (just an extra loop counter)
for i in range(1000000):
digest = hash_function(hash_preimage + nonce.to_bytes(32, 'big')).digest()
if int.from_bytes(digest, 'big') < (1 << (hash_len_bits - target_bits)):
shutdown.set()
return digest, nonce
nonce += 1
if shutdown.is_set():
return None, None
async def gen_nostr_ann_pow(nostr_pubk: bytes, target_bits: int) -> Tuple[int, int]:
"""Generate a PoW for a Nostr announcement. The PoW is hash[b'electrum-'+pubk+nonce]"""
import multiprocessing # not available on Android, so we import it here
hash_function = hashlib.sha256
hash_len_bits = 256
max_nonce: int = (1 << (32 * 8)) - 1 # 32-byte nonce
start_nonce = 0
max_workers = max(multiprocessing.cpu_count() - 1, 1) # use all but one CPU
manager = multiprocessing.Manager()
shutdown = manager.Event()
with ProcessPoolExecutor(max_workers=max_workers) as executor:
tasks = []
loop = asyncio.get_running_loop()
for task in range(0, max_workers):
task = loop.run_in_executor(
executor,
nostr_pow_worker,
start_nonce,
nostr_pubk,
target_bits,
hash_function,
hash_len_bits,
shutdown
)
tasks.append(task)
start_nonce += max_nonce // max_workers # split the nonce range between the processes
if start_nonce > max_nonce: # make sure we don't go over the max_nonce
start_nonce = random.randint(0, int(max_nonce * 0.75))
done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED)
hash_res, nonce_res = done.pop().result()
executor.shutdown(wait=False, cancel_futures=True)
return nonce_res, get_nostr_ann_pow_amount(nostr_pubk, nonce_res)
def get_nostr_ann_pow_amount(nostr_pubk: bytes, nonce: Optional[int]) -> int:
"""Return the amount of leading zero bits for a nostr announcement PoW."""
if not nonce:
return 0
hash_function = hashlib.sha256
hash_len_bits = 256
hash_preimage = b'electrum-' + nostr_pubk
digest = hash_function(hash_preimage + nonce.to_bytes(32, 'big')).digest()
digest = int.from_bytes(digest, 'big')
return hash_len_bits - digest.bit_length()
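A minimal usage sketch tying the two helpers together (to be awaited from async code; pubkey below stands for the 32-byte nostr pubkey without its prefix byte):

nonce, pow_bits = await gen_nostr_ann_pow(pubkey, 30)
assert pow_bits >= 30
assert get_nostr_ann_pow_amount(pubkey, nonce) == pow_bits   # verification recomputes the same count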