From 66f3cbe4017dfd0dbd0d98c3adcb77ee542820e3 Mon Sep 17 00:00:00 2001 From: ag Date: Fri, 22 Nov 2024 20:34:39 -0700 Subject: [PATCH] Prototype fast forward --- .gitignore | 12 + pyvenv.cfg | 4 +- requirements.txt | 3 + src/.piermesh | 5 +- src/Actions/Actions.py | 0 src/Actions/__init__.py | 0 src/Components/hopper.py | 10 +- src/Config/Args.py | 7 + src/Config/Context.py | 22 + src/Config/__init__.py | 0 src/Cryptography/WhaleSong.py | 192 +++++--- src/Daisy/Cache.py | 15 +- src/Daisy/Catch.py | 42 +- src/Daisy/CryptographyUtil.py | 45 +- src/Daisy/Daisy.py | 48 +- src/Daisy/Index.py | 31 +- src/Daisy/Store.py | 57 ++- src/Packets/HeaderPacket.py | 26 +- src/Packets/Message.py | 117 +++-- .../Messages/Protocols/barnacle/Barnacle.py | 0 .../Protocols/barnacle/README.DNTTPAB.md | 1 + .../Messages/Protocols/barnacle/__init__.py | 0 .../Messages/Protocols/bubble/Bubble.py | 27 ++ .../Messages/Protocols/catch/IndexSync.py | 4 +- .../Messages/Protocols/catch/Request.py | 4 +- .../Messages/Protocols/catch/Response.py | 4 +- .../Protocols/cryptography/Handshake.py | 34 +- .../Messages/Protocols/hopper/Request.py | 3 +- .../Messages/Protocols/hopper/Response.py | 10 +- .../Messages/Protocols/map/Announce.py | 36 ++ src/Packets/Packet.py | 12 +- src/Packets/SubMessage.py | 43 +- src/Services/ToDo.py | 27 ++ src/Siph/map.py | 15 +- src/Splash/res/css/style.css | 24 +- src/Splash/res/img/hopperdisplay.png | Bin 0 -> 707 bytes src/Splash/res/js/custom.js | 164 +++++++ src/Splash/serve.py | 414 +++++++++++------- src/Splash/templates/index/index.html | 27 +- src/Splash/templates/shared/base.html | 4 +- src/Splash/templates/shared/catch.editor.html | 46 +- src/Splash/templates/shared/catch.nav.html | 89 ++-- src/Splash/templates/shared/hopper.html | 61 ++- src/Splash/templates/shared/messenger.html | 38 +- src/Splash/templates/shared/p2chat.html | 22 + src/Splash/templates/shared/plank.html | 18 + src/Sponge/Protocols/Yellow.py | 184 ++++++++ src/Sponge/Protocols/__init__.py | 10 + src/Sponge/Protocols/bubble.py | 21 +- src/Sponge/Protocols/catch.py | 33 +- src/Sponge/Protocols/cryptography.py | 18 +- src/Sponge/Protocols/daisy.py | 9 + src/Sponge/Protocols/hopper.py | 19 +- src/Sponge/Protocols/map.py | 29 +- src/Sponge/base.py | 183 ++++---- src/Transceiver/Transceiver.py | 201 ++++++--- src/run.py | 347 +++++++++------ src/scripts/falin | 2 +- src/setup.fish | 1 + src/setup.sh | 1 + src/stale/router.py | 4 +- src/tlog.py | 174 ++++++++ src/tui.py | 0 src/ui.py | 6 +- 64 files changed, 2116 insertions(+), 889 deletions(-) create mode 100644 src/Actions/Actions.py create mode 100644 src/Actions/__init__.py create mode 100644 src/Config/Args.py create mode 100644 src/Config/Context.py create mode 100644 src/Config/__init__.py create mode 100644 src/Packets/Messages/Protocols/barnacle/Barnacle.py create mode 100644 src/Packets/Messages/Protocols/barnacle/README.DNTTPAB.md create mode 100644 src/Packets/Messages/Protocols/barnacle/__init__.py create mode 100644 src/Packets/Messages/Protocols/bubble/Bubble.py create mode 100644 src/Packets/Messages/Protocols/map/Announce.py create mode 100644 src/Splash/res/img/hopperdisplay.png create mode 100644 src/Splash/res/js/custom.js create mode 100644 src/Splash/templates/shared/p2chat.html create mode 100644 src/Splash/templates/shared/plank.html create mode 100644 src/Sponge/Protocols/Yellow.py create mode 100644 src/Sponge/Protocols/daisy.py create mode 100644 src/setup.fish create mode 100644 src/setup.sh create mode 100644 src/tlog.py create mode 100644 
src/tui.py diff --git a/.gitignore b/.gitignore index b8c1466b..39800cec 100644 --- a/.gitignore +++ b/.gitignore @@ -8,3 +8,15 @@ src/Splash/res/js/node_modules/ src/daisy/ src/catch/ src/logs/ +zims/ +project.matrix +piermesh.nvim +piermesh +node00/* +node00/ +*.index +*.log +*.vim +*.workspace +.workspace +.workspace.backup diff --git a/pyvenv.cfg b/pyvenv.cfg index 0537ffc0..0f94b2ca 100644 --- a/pyvenv.cfg +++ b/pyvenv.cfg @@ -1,3 +1,5 @@ home = /usr/bin include-system-site-packages = false -version = 3.10.12 +version = 3.11.2 +executable = /usr/bin/python3.11 +command = /usr/bin/python3 -m venv /home/ag/Documents/piermesh diff --git a/requirements.txt b/requirements.txt index 9c3a10cd..90d17952 100755 --- a/requirements.txt +++ b/requirements.txt @@ -14,3 +14,6 @@ textual textual-dev sphinx-markdown-builder==0.6.6 pycryptodome +pyjwt +uvloop +python-lsp-server[all] diff --git a/src/.piermesh b/src/.piermesh index c1139ce5..4e2e5242 100644 --- a/src/.piermesh +++ b/src/.piermesh @@ -6,8 +6,9 @@ WebUIPort = 5000 ShowTUI = True [OPERATOR_REQUIRED] -# TransceiverPort = /dev/ttyACM0 +TransceiverPort = /dev/ttyACM0 +PSK = jgf765!FS0+6 -# DO YOUR SETTINGS HERE +# DO YOUR NON REQUIRED SETTINGS HERE [OPERATOR_OVERRIDES] diff --git a/src/Actions/Actions.py b/src/Actions/Actions.py new file mode 100644 index 00000000..e69de29b diff --git a/src/Actions/__init__.py b/src/Actions/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/Components/hopper.py b/src/Components/hopper.py index 41bfcc2e..9896404a 100755 --- a/src/Components/hopper.py +++ b/src/Components/hopper.py @@ -7,9 +7,11 @@ import msgpack import lzma import base64 import mimetypes +import logging from Packets.Messages.Protocols.hopper.Response import HopperResponse +logger = logging.getLogger("__main__." 
+ __name__) def downloadFile(url, text=True, mimeType=None): fbytes = b"" @@ -45,12 +47,18 @@ def get(url: str, params=None, followTags=None): followTags None or list of tags to download the src/href from """ + logger.debug("Hopping to it") + # TODO: Non blocking requests + # WARN: Do not run self requests until this is fixed r = requests.get(url, params=params) + logger.debug("Content retrieved, parsing") r = { "response": r.text, "code": r.status_code, "content-type": r.headers.get("content-type"), } + logger.debug("Done parsing") + # TODO: Reject followtags if content type is other then html if followTags != None: soup = BeautifulSoup(r["response"], "html.parser") @@ -71,7 +79,7 @@ def get(url: str, params=None, followTags=None): elem["src"] = "" elem.string = script r["response"] = soup.text - + logger.debug("Done hopping") return r diff --git a/src/Config/Args.py b/src/Config/Args.py new file mode 100644 index 00000000..32642b7f --- /dev/null +++ b/src/Config/Args.py @@ -0,0 +1,7 @@ + + +def byFile(): + pass + +def byPrompt(): + pass diff --git a/src/Config/Context.py b/src/Config/Context.py new file mode 100644 index 00000000..9cff904b --- /dev/null +++ b/src/Config/Context.py @@ -0,0 +1,22 @@ + + +class Context: + def __init__(self, subsets: dict={}, **kwargs): + # Subsets should be a dict of list of value keys + self.ctx = {} + self.subsets = subsets + for key, val in kwargs.items(): + self.ctx[key] = { + "val": val, + "type": type(val) + } + + def __getitem__(self, idx): + return self.ctx[idx] + + def getSubset(self, subset): + if subset in self.subsets.keys(): + res = {} + for key in self.subsets[subset]: + res[key] = self.ctx[key]["val"] + return res diff --git a/src/Config/__init__.py b/src/Config/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/Cryptography/WhaleSong.py b/src/Cryptography/WhaleSong.py index 07d6c1d6..96606cb5 100755 --- a/src/Cryptography/WhaleSong.py +++ b/src/Cryptography/WhaleSong.py @@ -1,6 +1,7 @@ import base64 import os import lzma +import logging import msgpack @@ -11,6 +12,9 @@ from Crypto.Cipher import AES from Daisy.Store import Store + +logger = logging.getLogger("__main__." 
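The new Config.Context (src/Config/Context.py above) stores each keyword argument as a {val, type} entry and can hand back named subsets of those values. A minimal usage sketch, assuming only what the class above defines; the key names mirror the sample .piermesh settings and are otherwise illustrative:

from Config.Context import Context

# "subsets" maps a subset name to the keys it exposes; keys here follow the sample .piermesh
ctx = Context(
    subsets={"transceiver": ["TransceiverPort", "PSK"]},
    TransceiverPort="/dev/ttyACM0",
    PSK="jgf765!FS0+6",
    WebUIPort=5000,
)

ctx["WebUIPort"]["val"]        # 5000
ctx["WebUIPort"]["type"]       # <class 'int'>
ctx.getSubset("transceiver")   # {"TransceiverPort": "/dev/ttyACM0", "PSK": "jgf765!FS0+6"}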
+ __name__) + # TODO: Different store directories per node # TODO: First time psk transport initiation # Add this credential manually, its picked up and used when the two nodes try to communicate before the session is encrypted @@ -45,7 +49,7 @@ class Transport: Private key for node """ - def __init__(self, cache, nodeNickname, cLog): + def __init__(self, cache, nodeNickname, daisyCryptography, psk): """ Parameters ---------- @@ -59,20 +63,36 @@ class Transport: Reference to `run.Node.cLog` """ - self.cLog = cLog self.stores = {} - self.loadedKeys = {} self.nodeNickname = nodeNickname self.cache = cache + self.daisyCryptography = daisyCryptography if os.path.exists("daisy/cryptography/{0}/key".format(nodeNickname)) == False: - self.cLog(20, "Key store DNE, initializing") + logger.log(20, "Key store DNE, initializing") self.initStore("key") - self.genKeyPair() else: - self.cLog(20, "Key store exists, loading") - self.stores["key"] = Store("key", "cryptography", nodeNickname) - self.cLog(20, "Store loaded") - self.cLog(20, "Key store initialized") + logger.log(20, "Key store exists, loading") + self.stores["key"] = Store( + "key", "cryptography", nodeNickname, daisyCryptography + ) + logger.log(20, "Store loaded") + logger.log(20, "Key store initialized") + srecord = self.getRecord("key", "self") + if srecord == False: + self.stores["key"].createEmpty("self") + self.stores["key"].update( + "self", + {"PSK": self.daisyCryptography.pad(psk).encode("utf-8")}, + write=False + ) + if "publicKey" not in self.getRecord("key", "self").keys(): + self.addPublickey(None, None, forSelf=True) + else: + self.stores["key"].update("self", { + "publicKey": ECC.import_key( + self.getRecord("key", "self")["publicKey"] + ) + }, write=False) def kdf(self, bytesX): return SHAKE128.new(bytesX).read(32) @@ -90,10 +110,10 @@ class Transport: if store == "param": return nodeID in self.loadedParams.keys() elif store == "key": - isExists = nodeID in self.loadedKeys.keys() - if isExists: + record = self.getRecord("key", nodeID) + if record != False: for field in checkFieldsExist: - if not (field in self.loadedKeys[nodeID].keys()): + if not (field in record.keys()): if field == "staticKey": self.genStaticKey(nodeID) elif field == "ourEphemeralKey": @@ -105,15 +125,16 @@ class Transport: """ r = self.getRecord(store, nodeID) if r == False: - self.cLog( + logger.log( 30, "Tried to load nonexistent {0} for node {1}".format(store, nodeID) ) return False elif self.checkInMem(store, nodeID): - self.cLog(10, "{0}s already deserialized, skipping".format(store)) + logger.log(10, "{0}s already deserialized, skipping".format(store)) else: if store == "param": self.loadedParams[nodeID] = self.loadParamBytes(r) + """ elif store == "key": self.loadedKeys[nodeID] = { "publicKey": Serialization.load_pem_public_key(r["publicKey"]), @@ -121,31 +142,35 @@ class Transport: r["privateKey"], None ), } + """ return True - def getRecord(self, store: str, key: str): + def getRecord(self, store: str, key: str, ephemeral=False): """ Get record from store: store with key: key """ - r = stores[store].getRecord(key) + r = self.stores[store].getRecord(key, ephemeral=ephemeral) if r == False: - self.cLog(20, "Record does not exist") + logger.log(20, "Record does not exist") return False else: return r + # TODO: Fix stores, URGENT def initStore(self, store: str): """ Initialize store: store """ - self.stores[store] = Store(store, "cryptography", self.nodeNickname) + self.stores[store] = Store( + store, "cryptography", self.nodeNickname, 
self.daisyCryptography + ) if store == "param": self.genParams() self.stores[store].update("self", self.getParamsBytes(), recur=False) elif store == "key": self.stores[store].update("self", {}, recur=False) else: - self.cLog(30, "Store not defined") + logger.log(30, "Store not defined") def genStaticKey(self, onodeID, paramsOverride=False): """ @@ -166,33 +191,78 @@ class Transport: ) }, ) - self.loadedKeys[onodeID] = {"staticKey": staticKey} + self.stores["key"].update(onodeID, {"staticKey": staticKey}, write=False) def genOurEphemeralKey(self, onodeID): ourEphemeralKey = ECC.generate(curve="p256") - self.loadedKeys[onodeID]["ourEphemeralKey"] = ourEphemeralKey + self.stores["key"].update(onodeID, {"ourEphemeralKey": ourEphemeralKey}, write=False) - def addPublickey(self, onodeID, publicKey): - self.stores["key"].update(onodeID, {"publicKey": publicKey}) - self.loadedKeys[onodeID]["publicKey"] = ECC.import_key(publicKey) + def addPublickey(self, onodeID, publicKey, forSelf=False): + if forSelf: + publicKey = ECC.generate(curve="p256") + self.stores["key"].update("self", { + "publicKey": publicKey.export_key( + format="PEM", + prot_params={"iteration_count": 131072} + )}) + self.stores["key"].update("self", { + "publicKey": publicKey + }, + write=False) + else: + # TODO: Fix stores + # self.stores["key"].update(onodeID, {"publicKey": publicKey}) + logger.log(20, "Importing keys") + record = self.getRecord("key", onodeID) + if record == False: + self.stores["key"].createEmpty(onodeID) + self.stores["key"].update(onodeID, {"publicKey": publicKey}) + self.stores["key"].update(onodeID, {"publicKey": ECC.import_key(publicKey)}, write=False) def addPeerEphemeralKey(self, onodeID, peerEphemeralKey): - self.loadedKeys[onodeID]["peerEphemeralKey"] = ECC.import_key(peerEphemeralKey) + self.stores["key"].update(onodeID, {"peerEphemeralKey": ECC.import_key(peerEphemeralKey)}, write=False) - def sessionSetup(self, onodeID, publicKey, peerEphemeralKey): + def sessionSetup(self, onodeID, peerEphemeralKey): # TODO: Deeper checking before loading + # TODO: Loading existing records if self.getRecord("key", onodeID) == False: - self.stores["key"].createEmpty(onodeID) - self.genStaticKey(onodeID) - self.genOurEphemeralKey(onodeID) + logger.log(30, "No record, waiting for announce") else: - self.loadRecordToMem("key", onodeID) - self.addPublickey(onodeID, publicKey) - self.addPeerEphemeralKey(onodeID, peerEphemeralKey) - self.generateSessionKey(onodeID) + self.addPeerEphemeralKey(onodeID, peerEphemeralKey) + self.generateSessionKey(onodeID) def generateSessionKey(self, onodeID): - keysOb = self.loadedKeys[onodeID] + # TODO: Gen static key if not exists + # TODO: Gen our ephemeral key if not exists + keysOb = self.getRecord("key", onodeID, ephemeral=True) + if ("publicKey" not in keysOb) or ("staticKey" not in keysOb): + dkeysOb = self.getRecord("key", onodeID) + if ("publicKey" not in keysOb): + self.stores["key"].update( + onodeID, { + "publicKey": ECC.import_key( + dkeysOb["publicKey"] + ) + }, write=False + ) + if ("staticKey" not in keysOb): + self.stores["key"].update( + onodeID, { + "staticKey": ECC.import_key( + dkeysOb["staticKey"] + ) + }, write=False + ) + keysOb = self.getRecord("key", onodeID, ephemeral=True) + reget = False + if "staticKey" not in keysOb: + self.genStaticKey(onodeID) + reget = True + if "ourEphemeralKey" not in keysOb: + self.genOurEphemeralKey(onodeID) + reget = True + if reget: + keysOb = self.getRecord("key", onodeID, ephemeral=True) sessionKey = key_agreement( 
static_priv=keysOb["staticKey"], static_pub=keysOb["publicKey"], @@ -200,11 +270,11 @@ class Transport: eph_pub=keysOb["peerEphemeralKey"], kdf=self.kdf, ) - self.loadedKeys[onodeID]["sessionKey"] = sessionKey + self.stores["key"].update(onodeID, {"sessionKey": sessionKey}, write=False) return sessionKey # TODO: Build in transport security (node/node) - def encrypt(self, data, nodeID: str, isDict: bool = True): + def encrypt(self, data, nodeID: str, isDict: bool = True, pskEncrypt=False): """ Do Fernet encryption @@ -214,11 +284,21 @@ class Transport: isDict: bool Whether data is a dictionary """ - if (nodeID in self.loadedKeys.keys()) == False: - self.cLog(20, "Node {0} not in keychain".format(nodeID)) + if nodeID == "-00001" or pskEncrypt: + cipher = AES.new(self.getRecord("key", "self", ephemeral=True)["PSK"], AES.MODE_GCM) + nonce = cipher.nonce + if isDict: + data = msgpack.dumps(data) + ciphertext, tag = cipher.encrypt_and_digest(data) + return (ciphertext, nonce, tag) + elif (self.getRecord("key", nodeID)) == False: + logger.log(30, "Node {0} not in keychain".format(nodeID)) return False else: - r = self.loadedKeys[nodeID] + r = self.getRecord("key", nodeID, ephemeral=True) + if r == False: + r = self.getRecord("key", "self", ephemeral=True) + logger.info(r) if "sessionKey" in r.keys(): sessionKey = r["sessionKey"] cipher = AES.new(sessionKey, AES.MODE_GCM) @@ -226,41 +306,51 @@ class Transport: if isDict: data = msgpack.dumps(data) ciphertext, tag = cipher.encrypt_and_digest(data) - return (nonce, ciphertext, tag) + return (ciphertext, nonce, tag) elif "PSK" in r.keys(): cipher = AES.new(r["PSK"], AES.MODE_GCM) nonce = cipher.nonce if isDict: data = msgpack.dumps(data) ciphertext, tag = cipher.encrypt_and_digest(data) - return (nonce, ciphertext, tag) + return (ciphertext, nonce, tag) else: - self.cLog(20, "Node {0} does not have session key".format(nodeID)) + logger.log(20, "Node {0} does not have session key".format(nodeID)) def decrypt(self, data, nodeID: str, nonce, tag): """ Decrypt bytes and return either str or dict (TODO: Check whether to msgpack load) """ - if (nodeID in self.loadedKeys.keys()) == False: - self.cLog(20, "Node {0} not in keychain".format(nodeID)) - return False + # TODO: Handling existing record + record = self.getRecord("key", nodeID, ephemeral=True) + if (record == False) or ("sessionKey" not in record.keys()): + cipher = AES.new(self.getRecord("key", "self", ephemeral=True)["PSK"], AES.MODE_GCM, nonce=nonce) + + data = cipher.decrypt(data) + logger.log(10, data) + #data = msgpack.loads(data) + data = msgpack.loads(lzma.decompress(data)) + logger.log(10, "Decrypt/deserialize output") + logger.log(10, data) + return data + # logger.log(20, "Node {0} not in keychain".format(nodeID)) + # return False else: - r = self.loadedKeys[nodeID] - if "sessionKey" in r.keys(): - sessionKey = r["sessionKey"] + if "sessionKey" in record.keys(): + sessionKey = record["sessionKey"] cipher = AES.new(sessionKey, AES.MODE_GCM, nonce=nonce) data = cipher.decrypt(data) data = msgpack.loads(lzma.decompress(data)) return data - elif "PSK" in r.keys(): - cipher = AES.new(r["PSK"], AES.MODE_GCM, nonce=nonce) + elif "PSK" in record.keys(): + cipher = AES.new(record["PSK"], AES.MODE_GCM, nonce=nonce) data = cipher.decrypt(data) data = msgpack.loads(lzma.decompress(data)) return data else: - self.cLog(20, "Node {0} does not have session key".format(nodeID)) + logger.log(20, "Node {0} does not have session key".format(nodeID)) return False diff --git a/src/Daisy/Cache.py 
b/src/Daisy/Cache.py index 161daf41..7029e986 100755 --- a/src/Daisy/Cache.py +++ b/src/Daisy/Cache.py @@ -18,6 +18,7 @@ class Cache: def __init__( self, + daisyCryptography, filepaths=None, cacheFile=None, path: str = "daisy", @@ -42,25 +43,29 @@ class Cache: isCatch: bool Whether this cache is for catchs """ + self.daisyCryptography = daisyCryptography self.data = {} self.path = path + if not os.path.exists(self.path): + os.makedirs(self.path) + if filepaths != None: for fp in filepaths: fp = path + "/" + fp if os.path.isfile(fp): - self.data[fp] = Daisy(fp) + self.data[fp] = Daisy(fp, daisyCryptography) elif cacheFile != None: with open(cacheFile, "r") as f: for fp in f.read().split("\n"): - self.data[fp] = Daisy(fp) + self.data[fp] = Daisy(fp, daisyCryptography) elif walk: for root, dirs, files in os.walk(self.path): for p in dirs + files: if not (".json" in p): if not (".md" in p): tpath = root + "/" + p - self.data[tpath] = Daisy(tpath) + self.data[tpath] = Daisy(tpath, daisyCryptography) def create(self, path: str, data: dict, remote=False): """ @@ -78,7 +83,7 @@ class Cache: with open(self.path + "/" + path, "wb") as f: f.write(msgpack.dumps(data)) # logging.log(10, "Done creating record") - self.data[path] = Daisy(self.path + "/" + path) + self.data[path] = Daisy(self.path + "/" + path, self.daisyCryptography) # logging.log(10, "Done loading to Daisy") return self.data[path] else: @@ -96,7 +101,7 @@ class Cache: return self.data[path] else: if os.path.exists(self.path + "/" + path): - self.data[path] = Daisy(self.path + "/" + path) + self.data[path] = Daisy(self.path + "/" + path, self.daisyCryptography) return self.data[path] else: # logging.log(10, "File does not exist") diff --git a/src/Daisy/Catch.py b/src/Daisy/Catch.py index f62c409b..aa72d104 100755 --- a/src/Daisy/Catch.py +++ b/src/Daisy/Catch.py @@ -18,13 +18,23 @@ class Catch(Cache): catches = {} def __init__( - self, path: str = "catch", filepaths=None, catchFile=None, walk: bool = False + self, + daisyCryptography, + path: str = "catch", + filepaths=None, + catchFile=None, + walk: bool = False, ): """ Basically the same initialization parameters as Catch """ super().__init__( - filepaths=filepaths, cacheFile=catchFile, path=path, walk=walk, isCatch=True + daisyCryptography, + filepaths=filepaths, + cacheFile=catchFile, + path=path, + walk=walk, + isCatch=True, ) # TODO: Fins @@ -56,31 +66,35 @@ class Catch(Cache): r = self.search({"head": head, "body": body, "fins": fins}) else: r = self.search({"head": head, "body": body}) - return r[0][1]["html"] + if len(r) < 1: + return False + else: + return r[0][1]["html"] def addc(self, peer, node, seperator, head, body, data, fins=None, remote=False): - tnpath = "catch/" + node - if os.path.exists(tnpath) != True: - os.makedirs(tnpath) + tnpath = f"catch/{node}" + if os.path.exists(self.path + "/" + tnpath) != True: + os.makedirs(self.path + "/" + tnpath) tppath = tnpath + "/" + peer - if os.path.exists(tppath) != True: - os.makedirs(tppath) + if os.path.exists(self.path + "/" + tppath) != True: + os.makedirs(self.path + "/" + tppath) sid = str(random.randrange(0, 999999)).zfill(6) data["seperator"] = seperator data["head"] = head data["body"] = body if fins != None: data["fins"] = fins - res = self.create("{0}/{1}/{2}".format(node, peer, sid), data, remote=remote) + res = self.create("{0}/{1}".format(tppath, sid), data, remote=remote) return [sid, res] def genIndex(self, onodeID): dirList = [] - for k in self.data.keys(): - curCatch = {"path": k, "resNodeID": onodeID, "sid": 
str(uuid.uuid4())} - curCatch += self.data[k] - del curCatch["data"] - dirList.append(curCatch) + for k, v in self.data.items(): + curCatch = {"remoteNode": onodeID} + if type(v.msg) != str: + curCatch = curCatch | v.msg + del curCatch["html"] + dirList.append(curCatch) return dirList def mergeIndex(self, remoteIndex): diff --git a/src/Daisy/CryptographyUtil.py b/src/Daisy/CryptographyUtil.py index 25469fff..a9423dd6 100644 --- a/src/Daisy/CryptographyUtil.py +++ b/src/Daisy/CryptographyUtil.py @@ -1,33 +1,46 @@ from Crypto.Cipher import AES import traceback +import logging + + +logger = logging.getLogger("__main__." + __name__) class SteelPetal: - def __init__(self, key, cLog, nonce=None, testData=None): - self.cLog = cLog - if nonce == None: - self.cipher = AES.new(key, AES.MODE_GCM) - self.nonce = self.cipher.nonce - else: - self.cipher = AES.new(key, AES.MODE_GCM, nonce=nonce) - self.nonce = nonce - if testData != None: - try: - self.cipher.decrypt(testData) - except: - self.cLog(20, traceback.format_exec()) - return False + def __init__(self, key, nonce=None, testData=None): + try: + if nonce == None: + self.cipher = AES.new(self.pad(key).encode("utf-8"), AES.MODE_GCM) + self.nonce = self.cipher.nonce + else: + self.cipher = AES.new( + self.pad(key).encode("utf-8"), AES.MODE_GCM, nonce=nonce + ) + self.nonce = nonce + if testData != None: + try: + self.cipher.decrypt(testData) + except: + logger.log(30, traceback.format_exc()) + return False + except: + logger.log(30, traceback.format_exc()) + + def pad(self, key): + BS = AES.block_size + key = key + (BS - len(key) % BS) * chr(BS - len(key) % BS) + return key def encrypt(self, data): try: return self.cipher.encrypt_and_digest(data) except: - self.cLog(20, traceback.format_exec()) + logger.log(20, traceback.format_exec()) return False def decrypt(self, data): try: return self.cipher.decrypt(data) except: - self.cLog(20, traceback.format_exec()) + logger.log(20, traceback.format_exec()) return False diff --git a/src/Daisy/Daisy.py b/src/Daisy/Daisy.py index 8e2c51cd..3b3ce53d 100755 --- a/src/Daisy/Daisy.py +++ b/src/Daisy/Daisy.py @@ -1,12 +1,14 @@ import os import json import msgpack +import logging # TODO: delete # TODO: propagate json changes to msgpack automatically # TODO: propagate msgpack changes to cache automatically # TODO: Indexing +logger = logging.getLogger("__main__." 
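SteelPetal above pads a passphrase to the AES block size and wraps a single AES-GCM cipher per instance. Roughly how it is meant to be driven; the passphrase is the sample PSK from .piermesh, and the receiving side needs a fresh instance built from the same passphrase plus the sender's nonce:

from Daisy.CryptographyUtil import SteelPetal

petal = SteelPetal("jgf765!FS0+6")                # passphrase is padded to a valid AES key length
ciphertext, tag = petal.encrypt(b"record bytes")  # AES-GCM ciphertext plus authentication tag

receiver = SteelPetal("jgf765!FS0+6", nonce=petal.nonce)
plain = receiver.decrypt(ciphertext)              # b"record bytes"; note the tag is not verified here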
+ __name__) def _json_to_msg(path: str): """ @@ -82,10 +84,14 @@ class Daisy: f.write(msgpack.dumps(t)) self.msg = t else: - print("No such template as: " + template) + logger.log(20, "No such template as: " + template) else: - f.write(msgpack.dumps({})) - self.msg = {} + t = {} + if prefillDict != False: + for k in prefillDict.keys(): + t[k] = prefillDict[k] + f.write(msgpack.dumps(t)) + self.msg = t elif os.path.isdir(filepath): self.msg = "directory" else: @@ -172,26 +178,18 @@ class Daisy: else: return None + def json_to_msg(self, path: str): + """ + Convert json at the path plus .json to a msgpack binary -def loadTemplates(templatePath: str = "templates"): - """Load templates for prefilling records - - Parameters - ---------- - templatePath: str - Path to templates - """ - templates = {} - for p in os.listdir(templatePath): - p = templatePath + "/" + p - if os.path.isdir(p): - for ip in os.listdir(p): - ip = p + "/" + ip - if os.path.isdir(ip): - print("Too deep, skipping: " + ip) - else: - templates[ip] = Daisy(ip) - else: - templates[p] = Daisy(p) - self.templates = templates - return templates + Parameters + ---------- + path: str + Path to json minus the extension + """ + rpath = path + ".json" + res = b"" + with open(rpath) as f: + res = msgpack.dumps(json.load(f)) + with open(path, "wb") as f: + f.write(res) diff --git a/src/Daisy/Index.py b/src/Daisy/Index.py index 79f4c4e3..6ac820db 100644 --- a/src/Daisy/Index.py +++ b/src/Daisy/Index.py @@ -2,7 +2,15 @@ from Daisy.Daisy import Daisy class Index(Daisy): - def __init__(self, nodeNickname, prefill=[], indexedFields=[], autoIndex=True): + def __init__( + self, + nodeNickname, + daisyCryptography, + prefill=[], + indexedFields=[], + autoIndex=True, + ): + # TODO: Load from disk if autoIndex: if prefill != []: if indexedFields == []: @@ -14,11 +22,14 @@ class Index(Daisy): indexedFields = list(set(indexedFields)) super().__init__( nodeNickname + ".index", + daisyCryptography, prefillDict={"_index": prefill, "_fields": indexedFields}, ) def addEntry(self, entry): - self.write(override=entry) + index = self.msg["_index"] + index.append(entry) + self.write(override={"_index": index}) def search(self, keydict: dict, strict: bool = True): """ @@ -31,22 +42,16 @@ class Index(Daisy): Whether to require values match """ results = [] - for key, val in self.data["_index"].items(): - val = val.get() - if strict and type(val) != str: + for ob in self.msg["_index"]: + if strict and type(ob) != str: addcheck = False for k, v in keydict.items(): - if k in val.keys(): - if v in val[k]: + if k in ob.keys(): + if v in ob[k]: addcheck = True else: addcheck = False break if addcheck: - results.append([key, val]) - elif type(val) != str: - for k, v in keydict.items(): - if k in val.keys(): - if v in val[k]: - results.append([key, val]) + results.append(ob) return results diff --git a/src/Daisy/Store.py b/src/Daisy/Store.py index 5c91421f..cf9b78cb 100755 --- a/src/Daisy/Store.py +++ b/src/Daisy/Store.py @@ -1,7 +1,12 @@ from Daisy.Daisy import Daisy import os +import logging +import traceback +logger = logging.getLogger("__main__." 
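Daisy.Index above now keeps a flat list of entries under _index and matches them field by field in search(). A small sketch, assuming the Daisy constructor wiring this patch relies on; the SteelPetal instance stands in for whatever daisyCryptography object run.py passes, and the node00.index file it creates is covered by the *.index entry added to .gitignore:

from Daisy.Index import Index
from Daisy.CryptographyUtil import SteelPetal

crypto = SteelPetal("jgf765!FS0+6")
index = Index("node00", crypto, indexedFields=["head", "body"])

index.addEntry({"head": "docs", "body": "piermesh", "remoteNode": "000002"})
index.search({"head": "docs"})   # -> [{"head": "docs", "body": "piermesh", "remoteNode": "000002"}]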
+ __name__) + +# TODO: Higher priority erros class Store(Daisy): """ @@ -10,27 +15,51 @@ class Store(Daisy): `🔗 Source `__ """ - def __init__(self, store: str, path: str, nodeNickname: str): + def __init__(self, store: str, path: str, nodeNickname: str, daisyCryptography): fpath = "daisy/{0}/{1}".format(path, nodeNickname) cpath = "{0}/{1}/{2}".format(path, nodeNickname, store) if not os.path.exists(fpath): os.mkdir(fpath) - super().__init__("daisy/" + cpath) + super().__init__("daisy/" + cpath, daisyCryptography) + self.ephemeral = {} def createEmpty(self, key): self.msg[key] = {} - def update(self, entry: str, data, recur: bool = True): - if recur: - for key in data.keys(): - self.msg[entry][key] = data[key] + # TODO: Update usages of update where necessary to keep out of mem + def update(self, entry: str, data, recur: bool = True, write=True): + if write: + if recur: + if entry not in self.msg.keys(): + self.createEmpty(entry) + for key in data.keys(): + self.msg[entry][key] = data[key] + else: + self.msg[entry] = data + self.write() else: - self.msg[entry] = data - self.write() + if recur: + if entry not in self.ephemeral.keys(): + self.ephemeral[entry] = {} + for key in data.keys(): + self.ephemeral[entry][key] = data[key] + else: + self.ephemeral[entry] = data - def getRecord(self, key: str): - if key in self.get().keys(): - return self.get()[key] - else: - self.cLog(20, "Record does not exist") - return False + def getRecord(self, key: str, ephemeral=False): + logger.log(30, key) + try: + if ephemeral: + if key in self.ephemeral.keys(): + return self.ephemeral[key] + else: + logger.log(20, "Record does not exist") + return False + else: + if key in self.get().keys(): + return self.get()[key] + else: + logger.log(20, "Record does not exist") + return False + except Exception: + logger.log(30, traceback.format_exc()) diff --git a/src/Packets/HeaderPacket.py b/src/Packets/HeaderPacket.py index ab05f38c..2fb6f36c 100755 --- a/src/Packets/HeaderPacket.py +++ b/src/Packets/HeaderPacket.py @@ -42,30 +42,36 @@ class Header(Packet): sourceNode: int, recipient: int, recipientNode: int, - nonce, subpacket: bool = False, wantFullResponse: bool = False, packetsClass: int = 0, pAction: int = -1, + target=True, ): super().__init__( - "", packetsID=packetsID, packetCount=packetCount, packetsClass=packetsClass + b"", packetsID=packetsID, packetCount=packetCount, packetsClass=packetsClass ) + self.target = target self.sender = sender self.senderDisplayName = senderDisplayName - self.recipient = recipient - self.recipientNode = recipientNode - self.subpacket = subpacket + if target: + self.recipient = recipient + self.recipientNode = recipientNode + else: + self.recipient = -1 + self.recipientNode = -1 + # TODO: Populating with submessage ids + self.submessages = [] self.wantFullResponse = wantFullResponse self.pAction = pAction self.sourceNode = sourceNode - self.nonce = nonce + self.packetCount = packetCount - def usePreset(self, path: str): + def usePreset(self, path: str, daisyCryptography): """ Add preset fields to the packet """ - preset = Daisy(path) + preset = Daisy(path, daisyCryptography) for key in preset.get().keys(): self.msg[key] = preset.get()[key] @@ -79,10 +85,10 @@ class Header(Packet): res["sourceNode"] = self.sourceNode res["recipient"] = self.recipient res["recipientNode"] = self.recipientNode - res["subpacket"] = self.subpacket + res["submessages"] = self.submessages res["wantFullResponse"] = self.wantFullResponse res["packetsClass"] = self.packetsClass res["pAction"] = 
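Store above now has two layers: update(..., write=True) merges into the on-disk Daisy record, while write=False only touches the in-memory ephemeral dict, and getRecord(key, ephemeral=True) reads the latter. A sketch of that split, assuming the daisy/cryptography directory already exists as run.py sets it up:

from Daisy.Store import Store
from Daisy.CryptographyUtil import SteelPetal

store = Store("key", "cryptography", "node00", SteelPetal("jgf765!FS0+6"))

store.update("peer", {"publicKey": "...PEM..."})           # persisted through Daisy.write()
store.update("self", {"PSK": b"padded psk"}, write=False)  # ephemeral only, never written to disk

store.getRecord("peer")                   # -> {"publicKey": "...PEM..."}
store.getRecord("self", ephemeral=True)   # -> {"PSK": b"padded psk"}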
self.pAction - res["nonce"] = self.nonce + res["packetCount"] = self.packetCount return msgpack.dumps(res) diff --git a/src/Packets/Message.py b/src/Packets/Message.py index 09ef60f2..10ca8aee 100755 --- a/src/Packets/Message.py +++ b/src/Packets/Message.py @@ -4,9 +4,11 @@ import lzma import msgpack import random import math +import logging # DO NOT CHANGE DATA SIZE UNLESS YOU KNOW WHAT YOURE DOING +logger = logging.getLogger("__main__." + __name__) def dict2bytes(cdict: dict): return lzma.compress(msgpack.dumps(cdict)) @@ -37,6 +39,10 @@ class Message: pAction, dataSize: int = 128, wantFullResponse: bool = False, + target=True, + subMessage=False, + primaryMessage=None, + pskEncrypt=False ): # TODO: PSK for usage prior to credentials """ @@ -66,6 +72,11 @@ class Message: packetsClass: int Which protocol the packets are using """ + self.recipientNode = recipientNode + self.target = target + self.subMessage = subMessage + if subMessage: + self.primaryMessage = primaryMessage if isinstance(bytesObject, list): packets = [h.Header(bytesObject[0])] for packet in bytesObject: @@ -82,12 +93,18 @@ class Message: # Data passed in by peers should already have been e2ee encrypted by SubtleCrypto # Transport encryption # bytesObject = lzma.compress(bytesObject, str(recipientNode).zfill(6), isDict=False) - bytesObject, nonce, tag = cryptographyInfo.encrypt( - bytesObject, str(recipientNode).zfill(6), isDict=False - ) + if subMessage == False: + bytesObject, nonce, tag = cryptographyInfo.encrypt( + bytesObject, str(recipientNode).zfill(6), isDict=False, pskEncrypt=pskEncrypt + ) + logger.log(10, bytesObject) + self.nonce = nonce + self.tag = tag packets = [] self.packetsID = random.randrange(0, 999999) pnum = 1 + # if subMessage: + dataSize = 80 blen = math.ceil(len(bytesObject) / dataSize) tb = b"" for it in range(blen): @@ -95,33 +112,52 @@ class Message: b = bytesObject[it * dataSize :] else: b = bytesObject[it * dataSize : (it * dataSize + dataSize)] - packets.append( - p.Packet(b, self.packetsID, pnum, packetsClass=packetsClass) - ) + if subMessage: + packets.append( + p.Packet( + b, + self.packetsID, + pnum, + packetsClass=packetsClass, + primaryMessage=primaryMessage, + ) + ) + else: + packets.append( + p.Packet(b, self.packetsID, pnum, packetsClass=packetsClass) + ) pnum += 1 tb += b - packets.insert( - 0, - h.Header( - self.packetsID, - pnum, - sender, - senderDisplayName, - sourceNode, - recipient, - recipientNode, - nonce, - wantFullResponse=wantFullResponse, - packetsClass=packetsClass, - pAction=pAction, - ), - ) + if subMessage: + pass + else: + packets.insert( + 0, + h.Header( + self.packetsID, + pnum, + sender, + senderDisplayName, + sourceNode, + recipient, + recipientNode, + wantFullResponse=wantFullResponse, + packetsClass=packetsClass, + pAction=pAction, + target=target, + ), + ) + + self.fullPackets = [p for p in packets] + + if subMessage: + pnum -= 1 + for it in range(pnum): packet = msgpack.loads(packets[it].dump()) packet["packetCount"] = pnum packets[it] = msgpack.dumps(packet) - self.packets = packets def get(self) -> list[p.Packet]: @@ -130,18 +166,33 @@ class Message: """ return self.packets - def reassemble(self, completedMessage: dict, cryptographyInfo): + def reassemble(self, completedMessage: dict, cryptographyInfo, subMessage=False, yctx=None, packetCount=None): """ Reassemble packets from a completed message in `Sponge.base` """ data = b"" - for it in range(1, int(completedMessage["packetCount"])): - data += 
completedMessage["data"][completedMessage["dataOrder"].index(it)] - res = msgpack.loads( - lzma.decompress( - cryptographyInfo.decrypt( - data, completedMessage["sourceNode"], completedMessage["nonce"] - ) + sourceNode = None + # TODO: Fix reassembly for primary + if subMessage: + sourceNode = yctx["sourceNode"]["val"] + for it in range(1, packetCount+1): + data += completedMessage["data"][completedMessage["dataOrder"].index(it)] + data = msgpack.loads(lzma.decompress(data)) + logger.log(10, data) + logger.log(10, completedMessage["data"]) + logger.log(10, completedMessage["dataOrder"]) + else: + # TODO: Cryptography setup + packetCount = int(completedMessage.yctx["packetCount"]["val"]) + sourceNode = completedMessage.yctx["sourceNode"]["val"] + logger.log(10, completedMessage.data) + for it in range(1, packetCount): + if it in completedMessage.dataOrder: + data += completedMessage.data[completedMessage.dataOrder.index(it)] + logger.log(10, "pre decrypt") + logger.log(10, data) + data = cryptographyInfo.decrypt( + data, sourceNode, completedMessage.nonce, completedMessage.tag ) - ) - return res + # data = msgpack.loads(lzma.decompress(data)) + return data diff --git a/src/Packets/Messages/Protocols/barnacle/Barnacle.py b/src/Packets/Messages/Protocols/barnacle/Barnacle.py new file mode 100644 index 00000000..e69de29b diff --git a/src/Packets/Messages/Protocols/barnacle/README.DNTTPAB.md b/src/Packets/Messages/Protocols/barnacle/README.DNTTPAB.md new file mode 100644 index 00000000..c8ad4c96 --- /dev/null +++ b/src/Packets/Messages/Protocols/barnacle/README.DNTTPAB.md @@ -0,0 +1 @@ +# WARNING: DO NOT TRY TO POKE A BARNACLE diff --git a/src/Packets/Messages/Protocols/barnacle/__init__.py b/src/Packets/Messages/Protocols/barnacle/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/Packets/Messages/Protocols/bubble/Bubble.py b/src/Packets/Messages/Protocols/bubble/Bubble.py new file mode 100644 index 00000000..3347a948 --- /dev/null +++ b/src/Packets/Messages/Protocols/bubble/Bubble.py @@ -0,0 +1,27 @@ +from Packets.Message import Message +import Packets.Message + + +class Bubble(Message): + def __init__( + self, + sender, + senderID, + sourceNode, + recipient, + recipientNode, + cryptographyInfo, + data, + ): + bytesOb = Packets.Message.dict2bytes({"data": data, "recipient": recipient, "target": "bubble"}) + super().__init__( + bytesOb, + sender, + senderID, + sourceNode, + recipient, + recipientNode, + cryptographyInfo, + 2, + 0, + ) diff --git a/src/Packets/Messages/Protocols/catch/IndexSync.py b/src/Packets/Messages/Protocols/catch/IndexSync.py index cc7c03a6..e67d93bc 100644 --- a/src/Packets/Messages/Protocols/catch/IndexSync.py +++ b/src/Packets/Messages/Protocols/catch/IndexSync.py @@ -12,6 +12,7 @@ class IndexSync(Message): recipientNode, cryptographyInfo, index, + target=False ): bytesOb = Packets.Message.dict2bytes({"index": index}) super().__init__( @@ -22,6 +23,7 @@ class IndexSync(Message): recipient, recipientNode, cryptographyInfo, - 4, + 1, 2, + target=target ) diff --git a/src/Packets/Messages/Protocols/catch/Request.py b/src/Packets/Messages/Protocols/catch/Request.py index 77d82e30..d9ab13fd 100644 --- a/src/Packets/Messages/Protocols/catch/Request.py +++ b/src/Packets/Messages/Protocols/catch/Request.py @@ -14,8 +14,9 @@ class CatchRequest(Message): head, body, fins, + pskEncrypt=False ): - bytesOb = Packets.Message.dict2bytes({"head": head, "body": body, "fins": fins}) + bytesOb = Packets.Message.dict2bytes({"head": head, "body": body, "fins": fins, 
"recipient": sender, "recipientNode": sourceNode}) super().__init__( bytesOb, sender, @@ -26,4 +27,5 @@ class CatchRequest(Message): cryptographyInfo, 1, 0, + pskEncrypt=pskEncrypt ) diff --git a/src/Packets/Messages/Protocols/catch/Response.py b/src/Packets/Messages/Protocols/catch/Response.py index a099386c..d31fc51a 100644 --- a/src/Packets/Messages/Protocols/catch/Response.py +++ b/src/Packets/Messages/Protocols/catch/Response.py @@ -12,8 +12,9 @@ class CatchResponse(Message): recipientNode, cryptographyInfo, html, + pskEncrypt=False ): - bytesOb = Packets.Message.dict2bytes({"html": html}) + bytesOb = Packets.Message.dict2bytes({"html": html, "recipient": recipient, "target": "catch"}) super().__init__( bytesOb, sender, @@ -24,4 +25,5 @@ class CatchResponse(Message): cryptographyInfo, 1, 1, + pskEncrypt=pskEncrypt ) diff --git a/src/Packets/Messages/Protocols/cryptography/Handshake.py b/src/Packets/Messages/Protocols/cryptography/Handshake.py index 89bd4071..ce847a19 100644 --- a/src/Packets/Messages/Protocols/cryptography/Handshake.py +++ b/src/Packets/Messages/Protocols/cryptography/Handshake.py @@ -1,34 +1,46 @@ from Packets.Message import Message import Packets.Message +# TODO: Send with psk encryption class Handshake(Message): def __init__( - self, sender, senderID, recipient, recipientNode, cryptographyInfo, onodeID + self, sender, senderID, recipient, recipientNode, cryptographyInfo, onodeID, sourceNode ): publicKey = None ephemeralKey = None - if onodeID in cryptographyInfo.loadedKeys.keys(): - if "staticKey" in cryptographyInfo.loadedKeys[onodeID].keys(): - publicKey = cryptographyInfo.loadedKeys[onodeID]["staticKey"] + record = cryptographyInfo.getRecord("key", "self") + if record != False: + if "publicKey" in record.keys(): + publicKey = record["publicKey"] else: - cryptographyInfo.genStaticKey(onodeID) - publicKey = cryptographyInfo.loadedKeys[onodeID]["staticKey"] - if "ourEphemeralKey" in cryptographyInfo.loadedKeys[onodeID].keys(): - ephemeralKey = cryptographyInfo.loadedKeys[onodeID]["ourEphemeralKey"] + raise Exception("Public key missing for node") + if "ourEphemeralKey" in record.keys(): + ephemeralKey = record["ourEphemeralKey"] else: cryptographyInfo.genOurEphemeralKey(onodeID) - ephemeralKey = cryptographyInfo.loadedKeys[onodeID]["ourEphemeralKey"] + record = cryptographyInfo.getRecord("key", onodeID, ephemeral=True) + ephemeralKey = record["ourEphemeralKey"].export_key( + format="PEM", + prot_params={"iteration_count": 131072} + ) + if "staticKey" not in record.keys(): + cryptographyInfo.genStaticKey(onodeID) + else: + raise Exception("Node does not exist") bytesOb = Packets.Message.dict2bytes( - {"publicKey": publicKey, "ephemeralKey": ephemeralKey} + {"ephemeralKey": ephemeralKey} ) super().__init__( bytesOb, sender, senderID, + sourceNode, recipient, recipientNode, cryptographyInfo, - packetsClass=3, + 3, + 0, + pskEncrypt=True ) diff --git a/src/Packets/Messages/Protocols/hopper/Request.py b/src/Packets/Messages/Protocols/hopper/Request.py index 1038262e..d8e6db7b 100644 --- a/src/Packets/Messages/Protocols/hopper/Request.py +++ b/src/Packets/Messages/Protocols/hopper/Request.py @@ -12,9 +12,10 @@ class HopperRequest(Message): recipientNode, url, params, + method, cryptographyInfo, ): - bytesOb = Packets.Message.dict2bytes({"url": url, "params": params}) + bytesOb = Packets.Message.dict2bytes({"url": url, "parameters": params, "method": method, "recipient": sender, "recipientNode": sourceNode}) super().__init__( bytesOb, sender, diff --git 
a/src/Packets/Messages/Protocols/hopper/Response.py b/src/Packets/Messages/Protocols/hopper/Response.py index 6540fd58..ad219506 100644 --- a/src/Packets/Messages/Protocols/hopper/Response.py +++ b/src/Packets/Messages/Protocols/hopper/Response.py @@ -4,18 +4,20 @@ import Packets.Message class HopperResponse(Message): def __init__( - self, sender, senderID, recipient, recipientNode, response, cryptographyInfo + self, sender, senderID, sourceNode, recipient, recipientNode, response, cryptographyInfo ): - bytesOb = Packets.Message.dict2bytes({"response": response}) + bytesOb = Packets.Message.dict2bytes({"res": response, "recipient": recipient, "target": "hopper"}) - bytesOb = cryptographyInfo.encrypt(bytesOb, recipientNode) + # bytesOb = cryptographyInfo.encrypt(bytesOb, recipientNode) super().__init__( bytesOb, sender, senderID, + sourceNode, recipient, recipientNode, cryptographyInfo, - packetsClass=5, + 5, + 1 ) diff --git a/src/Packets/Messages/Protocols/map/Announce.py b/src/Packets/Messages/Protocols/map/Announce.py new file mode 100644 index 00000000..9683743d --- /dev/null +++ b/src/Packets/Messages/Protocols/map/Announce.py @@ -0,0 +1,36 @@ +from Packets.Message import Message +import Packets.Message +import logging + + +logger = logging.getLogger("__main__." + __name__) + +# TODO: Add public key + +class AnnounceMessage(Message): + def __init__( + self, + sender, + senderID, + sourceNode, + cryptographyInfo, + mapping, + ): + mapping["publicKey"] = cryptographyInfo.getRecord("key", "self")["publicKey"] + recipient = -1 + recipientNode = -1 + bytesOb = Packets.Message.dict2bytes(mapping) + logger.log(10, "Mapping bytes") + logger.log(10, bytesOb) + super().__init__( + bytesOb, + sender, + senderID, + sourceNode, + recipient, + recipientNode, + cryptographyInfo, + 0, + 0, + target=False, + ) diff --git a/src/Packets/Packet.py b/src/Packets/Packet.py index 196a0049..c114c312 100755 --- a/src/Packets/Packet.py +++ b/src/Packets/Packet.py @@ -32,6 +32,7 @@ class Packet: packetNumber=False, packetCount: int = 1, packetsClass: int = -1, + primaryMessage=None, ): if packetsID == False: self.packetsID, self.packetNumber, self.data, self.packetsClass = ( @@ -42,14 +43,7 @@ class Packet: self.packetNumber = packetNumber self.data = data self.packetsClass = packetsClass - if packetsClass != -1: - pass - """template = Daisy("daisy/packet_templates/template.lookup").get()[ - str(packetsClass).zfill(2) - ] - edata = Daisy("daisy/packet_templates/" + template) - for key in edata.get().keys(): - self.data[key] = edata.get()[key]""" + self.primaryMessage = primaryMessage def parsePayload(data): """ @@ -75,6 +69,8 @@ class Packet: } if res["data"] == "": res.pop("data") + if self.primaryMessage != None: + res["primaryMessage"] = self.primaryMessage ores = msgpack.dumps(res) # logging.log(20, "Packet size: " + str(sys.getsizeof(ores))) return ores diff --git a/src/Packets/SubMessage.py b/src/Packets/SubMessage.py index 31a144b1..186bd13d 100644 --- a/src/Packets/SubMessage.py +++ b/src/Packets/SubMessage.py @@ -1,4 +1,39 @@ -class SubMessage: - """ - TODO - """ +from Packets.Message import Message +import Packets.Message +import logging + + +logger = logging.getLogger("__main__." 
+ __name__) + +class SubMessage(Message): + def __init__( + self, + sender, + senderID, + sourceNode, + recipient, + recipientNode, + cryptographyInfo, + protocolID, + pAction, + data, + target=True, + primaryMessage=None + ): + bytesOb = Packets.Message.dict2bytes(data) + logger.log(10, "Submessage bytes") + logger.log(10, bytesOb) + super().__init__( + bytesOb, + sender, + senderID, + sourceNode, + recipient, + recipientNode, + cryptographyInfo, + protocolID, + pAction, + target=target, + subMessage=True, + primaryMessage=primaryMessage + ) diff --git a/src/Services/ToDo.py b/src/Services/ToDo.py index e69de29b..a7763443 100644 --- a/src/Services/ToDo.py +++ b/src/Services/ToDo.py @@ -0,0 +1,27 @@ +from Services.Action import Action + + +class ToDo: + def __init__(self): + self.actions = [] + + # otf here meaning on the fly + # This prefix should be applied to optional + # fields that allow soft defining components + # by taking in their initialization parameters + # and instantiating an object with those + # Q: Should be break camel case here for cleaner + # reading? ex. otf_action looks better then otfAction + def add(self, action, otfAction=None, otfData=None): + if otfAction != None: + if otfData != None: + action = Action(otfAction, otfData) + + self.actions.append(action) + + def next(self): + if len(self.actions) < 1: + return (False, None) + else: + action = self.actions.pop() + return (action.getAction(), action.getData()) diff --git a/src/Siph/map.py b/src/Siph/map.py index 1f516184..58471b01 100755 --- a/src/Siph/map.py +++ b/src/Siph/map.py @@ -39,7 +39,20 @@ class Network: self.mimport() self.lookup = {} - def addLookup(self, onodeID: str, mnodeID: str): + async def addLookup(self, onodeID: str, mnodeID: str): + """ + Adds node to lookup + + Parameters + ---------- + onodeID: str + Internal nodeID + mnodeID: str + MeshTastic nodeID + """ + self.lookup[onodeID] = mnodeID + + def syncaddLookup(self, onodeID: str, mnodeID: str): """ Adds node to lookup diff --git a/src/Splash/res/css/style.css b/src/Splash/res/css/style.css index 8279db36..1e1a3e30 100755 --- a/src/Splash/res/css/style.css +++ b/src/Splash/res/css/style.css @@ -8,6 +8,21 @@ --palette-two: #A6B08E; --palette-three: #879B77; --palette-four: #61805B; + --grid-columns: 8; + --grid-rows: 8; +} + +#controls { + display: grid; + grid-template-columns: repeat(var(--grid-columns), auto); + gap: 5%; +} + +#render { + display: grid; + grid-template-columns: repeat(var(--grid-columns), 20px); + grid-template-rows: repeat(var(--grid-rows), 20px); + border: 1px solid black; } html { @@ -22,6 +37,10 @@ html { background-color: var(--palette-two); } +.plankInner { + display: none; +} + ul { padding: 0; list-style-type: none !important; @@ -29,9 +48,12 @@ ul { li { padding-top: 5px; + text-decoration: none; + list-style-type: none; } input[type=text], input[type=number] { min-width: 150px; -} + max-width: 150px; +} \ No newline at end of file diff --git a/src/Splash/res/img/hopperdisplay.png b/src/Splash/res/img/hopperdisplay.png new file mode 100644 index 0000000000000000000000000000000000000000..fdf2beb02bd2a0ad2c22deb46f9f0eb37a7427a4 GIT binary patch literal 707 zcmV;!0zCbRP)t%FMusTF)JZJM+~A0bJR;wrc}2o9o) zufhL7SHV?55EMbg(b?A`63=Z5EuuHvoFCu4-{Is7_#zoS>oiP2Hs{*W>9Dr2xTN`C zXr+s0#_5Rbjx{+u8{yyec}Jb9w;|Q*|9gL~)}-O+P}d|ps#~@zEYlEKb1lVN!kv0L zo)kV59<*gZ;VY#l%KWT2Rpxiab}TvtKGjo8)o`MQ^|YN89uN*@GX=dekNT1}BR4-M zwMTX@4pF8FqmiJ%Dj8ftQgf2o-@c)eZ-%^hba`78vBi-hE#5&<3)0n)YKm^ilcA_` z|DC5}jtrN(w}qkc$@AX&z`q0E+n)DzRsU7kH literal 0 
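Services.ToDo above queues Action objects and can build them on the fly from otfAction/otfData. Services.Action.Action itself is not part of this patch, so its getAction()/getData() accessors are assumed from the usage in next():

from Services.ToDo import ToDo

todo = ToDo()
# "otf" (on the fly) form: the Action is constructed from a name plus its data
todo.add(None, otfAction="sendAnnounce", otfData={"onodeID": "000001"})

action, data = todo.next()   # -> ("sendAnnounce", {"onodeID": "000001"})
todo.next()                  # -> (False, None) once the queue is drained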
HcmV?d00001 diff --git a/src/Splash/res/js/custom.js b/src/Splash/res/js/custom.js new file mode 100644 index 00000000..7c610886 --- /dev/null +++ b/src/Splash/res/js/custom.js @@ -0,0 +1,164 @@ +function hopID() { + document.getElementById("hopperPeerID").value = + document.getElementById("peerID").value; +} + +function toggle(id) { + var cstyle = document.getElementById(id).style.display; + var nstyle = "block"; + if (cstyle == "block") { + nstyle = "none"; + } + document.getElementById(id).style.display = nstyle; +} + +function addFin() { + var pfin = document.querySelector("#fins").lastElementChild; + var pclone = pfin.cloneNode(); + pclone.id = "cfin"; + var cid = pclone.firstElementChild.id; + cid = cid.substring(3); + cid = ("" + parseInt(cid) + 1).padStart(2, "0"); + cid = fin + cid; + pclone.firstElementChild.id = cid; + pclone.firstElementChild.id = cid; + pfin.insertAdjacentElement("afterend", pclone); +} +function getFins() { + var fins = document.querySelector("#fins").children; + var finsStr = ""; + for (var i = 0; i < fins.length; i++) { + var fin = fins[i]; + finsStr = finsStr + fin.firstElementChild.value; + if (i != fins.length - 1) { + finsStr = finsStr + 1; + } + } + document.getElementById("finsStr").value = finsStr; +} +function getCatch() { + document.getElementById("catchPeerID").value = + document.getElementById("peerID").value; + getFins(); +} + +// P2Chat code +function splash(that) { + //alert(parent.id); + //alert(parent.getAttribute("data-coord")); + //alert(that.value); + that.style.backgroundColor = document.querySelector("#color").value; +} + +function cGen(that) { + document.getElementById("render").innerHTML = ""; + var parent = that.parentElement; + // alert("parent"); + var canvasX = Number(document.querySelector("#canvasX").value); + // alert("x"); + canvasX = Math.floor(canvasX); + document + .querySelector(":root") + .style.setProperty("--grid-rows", "" + canvasX); + // alert("grid"); + + var canvasY = Number(document.querySelector("#canvasY").value); + //alert(document.querySelector("#canvasY").value); + canvasY = Math.floor(canvasY); + document + .querySelector(":root") + .style.setProperty("--grid-columns", "" + canvasY); + //alert(canvasY); + var nodeRender = ""; + var cloneRender = ""; + var nodeControl = ""; + var cloneControl = ""; + //alert("start loop"); + for (let x = 0; x < canvasX; x++) { + for (let y = 0; y < canvasY; y++) { + //alert(" in"); + nodeRender = document.getElementById("rendertemplate"); + //alert(" past template"); + + cloneRender = nodeRender.cloneNode(true); + cloneRender.style.display = "grid"; + cloneRender.id = "i" + x + "x" + y; + if (y == 0) { + //alert(cloneRender.innerHTML); + } + document.getElementById("render").appendChild(cloneRender); + } + } +} + +function setColor(that) { + var color = that.value; + //alert(typeof color); + if (color.includes("#")) { + document.querySelector("#color").value = color; + } else { + document.querySelector("#color").value = "#" + color; + document.querySelector("#picker").value = "#" + color; + } +} + +function saveAs(uri, filename) { + var link = document.createElement("a"); + if (typeof link.download === "string") { + link.href = uri; + link.download = filename; + + //Firefox requires the link to be in the body + document.body.appendChild(link); + + //simulate click + link.click(); + + //remove the link when done + document.body.removeChild(link); + } else { + window.open(uri); + } +} + +function save(toFile) { + var canvas = document.createElement("canvas"); + var canvasX = 
Number(document.querySelector("#canvasX").value); + var canvasY = Number(document.querySelector("#canvasY").value); + //alert(canvasX); + //alert(canvasY); + canvas.width = canvasY; + canvas.height = canvasX; + var ctx = canvas.getContext("2d"); + var x = 0; + var y = 0; + + for (x = 0; x < canvasX; x++) { + for (y = 0; y < canvasY; y++) { + //alert(document.querySelector("#i" + x + "x" + y).style.backgroundColor); + //alert("before fill style"); + ctx.fillStyle = document.querySelector( + "#i" + x + "x" + y, + ).style.backgroundColor; + //ctx.fillStyle = "#00ff00"; + //alert("after fill style"); + ctx.fillRect(y, x, 1, 1); + } + } + if (toFile) { + saveAs(canvas.toDataURL("image/png"), " download.png"); + } else { + document.getElementById("p2img").value = canvas.toDataURL("image/png"); + } +} +function p2ToBubble() { + save(); + var bub = new Object(); + bub.img = document.getElementById("p2img").value; + document.getElementById("chat_message").value += JSON.stringify(bub); +} + +document.addEventListener("DOMContentLoaded", function(event) { + setColor(document.getElementById("picker")); + cGen(document.getElementById("canvasY")); +}); diff --git a/src/Splash/serve.py b/src/Splash/serve.py index 6df3ec73..7b05263b 100755 --- a/src/Splash/serve.py +++ b/src/Splash/serve.py @@ -1,11 +1,13 @@ from uuid import uuid4 import Components.hopper as hopper from Packets.Messages.Protocols.catch.Request import CatchRequest +from Packets.Messages.Protocols.catch.IndexSync import IndexSync from Packets.Messages.Protocols.hopper.Request import HopperRequest +from Packets.Messages.Protocols.bubble.Bubble import Bubble from microdot import Microdot from microdot import send_file -from microdot.websocket import with_websocket +from microdot.websocket import with_websocket, WebSocketError from microdot import Request from microdot.jinja import Template from microdot.session import Session, with_session @@ -13,8 +15,16 @@ from microdot.session import Session, with_session import random import json import time +import logging +import traceback +import uuid +import re + import msgpack + +logger = logging.getLogger("__main__." + __name__) + # Enable 500 kB files in the webui Request.max_content_length = 1024 * 1024 * 0.5 Request.max_body_length = 1024 * 1024 * 0.5 @@ -57,26 +67,24 @@ class Server: catch, onodeID, network, - cLog, cryptographyInfo, remoteCatchIndex, - cache + cache, ): - self.cLog = cLog self.transceiver = transceiver self.network = network - self.network.addLookup(onodeID, self.transceiver.interface.localNode.nodeNum) + self.network.syncaddLookup(onodeID, self.transceiver.interface.localNode.nodeNum) self.nodeID = str(onodeID) self.peerIDs = {} self.app = Microdot() - # TODO: Secret key generation - self.session = Session(self.app, secret_key='6e012a8d-f857-4bd1-a245-bbe6a27e6440') + self.session = Session( + self.app, secret_key=str(uuid.uuid4()) + ) self.catch = catch self.cache = cache self.cryptographyInfo = cryptographyInfo self.remoteCatchIndex = remoteCatchIndex - # self.nmap = {self.nodeID: self.t.interface.localNode.nodeNum} - # self.cLog(20, "Initialized server") + logger.info("Initialized server") @self.app.route("/res/") async def static(request, path): @@ -86,7 +94,45 @@ class Server: if ".." 
in path: # directory traversal is not allowed return "Not found", 404 - return send_file("webui/build/res/" + path, max_age=86400) + return send_file("Splash/build/res/" + path, max_age=86400) + + @self.app.route("/") + async def index(request): + """ + Static handler to serve the web ui + """ + try: + return send_file("Splash/build/index/index.html") + except Exception as e: + logger.error(traceback.format_exc()) + return "Server error", 500 + + @self.app.route("/hop/") + async def hop(request, tmpResourceID): + try: + return self.cache.get("tmp/hopper/" + tmpResourceID).get()["html"] + except Exception as e: + logger.error(traceback.format_exc()) + return "Server error", 500 + + @self.app.route("/api/json") + async def api(request): + try: + return {"hello": "world"} + except Exception as e: + logger.error(traceback.format_exc()) + return "Server error", 500 + + @self.app.route("/admin") + @with_session + async def admin(request): + try: + return Template("Splash/admin/admin.html").render(psks=self.getPSKs()) + except Exception as e: + logger.error(traceback.format_exc()) + return "Server error", 500 + + @self.app.route("/bubble") @with_websocket @@ -99,114 +145,133 @@ class Server: `🔗 HTMX docs `_ """ while True: - r = await ws.receive() - message = json.loads(r) - trigger = message["HEADERS"]["HX-Trigger"] - # TODO: Drop old id from cache on regen - if trigger == "gpID": - peerID = str(random.randrange(0, 1000000)).zfill(6) - await ws.send( - """ -

Peer ID: {0}

- """.format( - peerID - ) - ) - await ws.send( - """ - - """.format( - peerID - ) - ) - await ws.send( - """ -

Node ID: {0}

- """.format( - self.nodeID - ) - ) - await ws.send( - """ """.format( - self.nodeID - ) - ) - await ws.send( - "".format( - peerID - ) - ) - await ws.send( - "".format( - peerID - ) - ) - peer = {"usedLast": round(time.time() * 1000), "ws": ws} - self.peerIDs[peerID] = peer - elif trigger == "bubble": - sender = message["bID"] - data = message["chat_message"] - # TODO: Setting sender name id - # senderName = message["senderID"] - senderName = 000000 - recipient = message["recipientID"] - recipientNode = message["recipientNode"] - await self.t.addPackets( - msgpack.dumps({"data": data}), - sender, - senderName, - recipient, - int(recipientNode), - directID=self.network.doLookup(recipientNode), - packetsClass=2, - ) - elif trigger == "catch": - res = self.catch.get( - message["head"], - message["body"], - fins=message["finsStr"].split(","), - ) - if res == False: + try: + r = await ws.receive() + message = json.loads(r) + #logger.debug(json.dumps(message, indent=4)) + trigger = message["HEADERS"]["HX-Trigger"] + logger.debug(f"Trigger: {trigger}") + # TODO: Drop old id from cache on regen + if trigger == "gpID": + peerID = str(random.randrange(0, 1000000)).zfill(6) await ws.send( - '
{0}
'.format( - "Searching PierMesh for Catch please wait..." + """ +

Peer ID: {0}

+ """.format( + peerID ) ) - - peerID = message["catchPeerID"] - q = { - "head": message["head"], - "body": message["body"], - "fins": message["fins"].split(","), - } - if q["fins"] == "": - del q["fins"] - q = self.remoteCatchIndex.search(q) - if q != False: - CatchRequest( - peerID, - 000000, - self.nodeID, - q["remoteNode"], - q["remoteNode"]", - self.cryptographyInfo, - message["head"], - message["body"], - message["fins"], + await ws.send( + """ + + """.format( + peerID ) - # TODO: Daisy replication settings + ) + await ws.send( + """ +

Node ID: {0}

+ """.format( + self.nodeID + ) + ) + await ws.send( + """ """.format( + self.nodeID + ) + ) + await ws.send( + "".format( + peerID + ) + ) + await ws.send( + "".format( + peerID + ) + ) + peer = {"usedLast": round(time.time() * 1000), "ws": ws} + self.peerIDs[peerID] = peer + elif trigger == "bubble": + sender = message["bID"] + data = message["chat_message"] + # TODO: Setting sender name id + # senderName = message["senderID"] + senderName = 000000 + recipient = message["recipientID"] + recipientNode = message["recipientNode"] + r = Bubble( + sender, + senderName, + self.nodeID, + recipient, + recipientNode, + self.cryptographyInfo, + data, + ) + await self.transceiver.sendMessage(r) + elif trigger == "catch": + res = self.catch.get( + message["head"], + message["body"], + fins=message["finsStr"].split(","), + ) + if res == False: + await ws.send( + '
{0}
'.format( + "Searching PierMesh for Catch please wait..." + ) + ) + + peerID = message["catchPeerID"] + q = { + "head": message["head"], + "body": message["body"], + } + fins = None + if "fins" in message: + if message["fins"] != "": + q["fins"] = message["fins"] + fins = message["fins"] + # TODO: Handling multiple results + q = self.remoteCatchIndex.search(q)[0] + if q != False: + m = CatchRequest( + peerID, + 000000, + self.nodeID, + q["remoteNode"], + q["remoteNode"], + self.cryptographyInfo, + message["head"], + message["body"], + fins + ) + await self.transceiver.sendMessage(m) + # TODO: Daisy replication settings elif trigger == "hopper": url = message["url"] - isPost = bool(message["isPost"]) - remote = bool(message["remote"]) + logger.debug(url) + isPost = "isPost" in message.keys() + method = "get" + if isPost: + method = "post" + remote = "remote" in message.keys() remoteNode = message["remoteNode"] - params = json.loads(message["params"]) + params = "" + try: + params = json.loads(message["params"]) + except json.decoder.JSONDecodeError: + # TODO: Proper error code + # TODO: Fix stalling at startup + # TODO: Request missing packets` + logger.error("Parameters on hop request were not json, dropping") # TODO: Redirecting to html content - if remote: - peerID = message["peerID"] + if remote and (remoteNode != ""): + peerID = message["hopperPeerID"] await ws.send( '
{0}
'.format( - "Requesting hop from remote node..." + "Requesting hop from remote node..." ) ) @@ -218,8 +283,10 @@ class Server: remoteNode, url, params, + method, self.cryptographyInfo, ) + await self.transceiver.sendMessage(r) else: if isPost: await ws.send( @@ -230,63 +297,98 @@ class Server: else: res = hopper.get(url, params) if res["content-type"] == "text/html": + logger.debug("Local hopping done, html content found") resID = uuid4() - self.cache.create("tmp/hopper/" + resID, {"html": res}) - await ws.send("
".format(resID)) + self.cache.create( + "tmp/hopper/" + resID, {"html": res} + ) + await ws.send( + "
".format( + resID + ) + ) else: + logger.debug("Local hopping done, non html content found") await ws.send( '
{0}
'.format(res) ) - else: - await ws.send('
{0}
'.format(res)) - # TODO: Catch update packets - elif trigger == "catchEdit": - self.catch.addc( - message["eID"], - self.nodeID, - message["sep"], - message["head"], - message["body"], - {"html": message["catchContent"]}, - ) - await ws.send( + # TODO: Catch update packets + elif trigger == "catchEdit": + if message["eID"] == "": + raise ValueError("Peer ID is blank") + self.catch.addc( + message["eID"], + self.nodeID, + message["sep"], + message["head"], + message["body"], + {"html": message["catchContent"]}, + ) + await ws.send( + """ +
  • OK
""" -
  • OK
- """ - ) - else: - await ws.send( - """
hi
""" - ) - - @self.app.route("/") - async def index(request): - """ - Static handler to serve the web ui - """ - return send_file("webui/build/index/index.html") - - @self.app.route("/hop/") - async def hop(request, tmpResourceID): - return self.cache.get("tmp/hopper/" + tmpResourceID).get()["html"] - - @self.app.route("/api/json") - async def api(request): - return {"hello": "world"} - - @self.app.route("/admin") - @with_session - async def admin(request): - return Template('admin/admin.html').render(psks=self.getPSKs()) - + ) + logger.info(self.catch.genIndex(self.nodeID)) + indexUpdate = IndexSync( + message["eID"], + 0, + self.nodeID, + "-00001", + "-00001", + self.cryptographyInfo, + self.catch.genIndex(self.nodeID) + ) + await self.transceiver.sendMessage(indexUpdate) + elif trigger == "admin": + # TODO + pass + else: + await ws.send( + """
hi
""" + ) + except WebSocketError as e: + pass + + # Uncomment below for WebSocket debugging + logger.debug(traceback.format_exc()) + return "Server error", 500 + except Exception as e: + logger.error(traceback.format_exc()) + return "Server error", 500 + async def getPSKs(self): - psks = [{"psk": v["PSK"], "nodeID": k} for k,v in self.cryptographyInfo["msg"].items()] + psks = [ + {"psk": v["PSK"], "nodeID": k} + for k, v in self.cryptographyInfo["msg"].items() + ] return psks - async def sendToPeer(self, peerID: str, data: str): + # TODO: Send catch to catch display + async def sendToPeer(self, peerID: str, data: str, target: str): """ Send data to Websocket of peer with peerID """ - await self.peerIDs[peerID]["ws"].send( - "
  • {0}
".format(data) - ) + logger.debug(target) + if target == "bubble": + mapper = [] + for ob in re.findall('{(.+?)}', data): + cobs = "{" + ob + "}" + cob = json.loads(cobs) + if "img" in cob.keys(): + cimg = cob["img"] + mapper.append([cobs, f""]) + for map in mapper: + data = data.replace(map[0], map[1]) + await self.peerIDs[peerID]["ws"].send( + "
  • {0}
".format(data) + ) + elif target == "catch": + logger.debug("In catch") + await self.peerIDs[peerID]["ws"].send( + "
{0}
".format(data) + ) + elif target == "hopper": + await self.peerIDs[peerID]["ws"].send( + "
{0}
".format(data) + ) diff --git a/src/Splash/templates/index/index.html b/src/Splash/templates/index/index.html index 2ca03b01..7144179c 100755 --- a/src/Splash/templates/index/index.html +++ b/src/Splash/templates/index/index.html @@ -1,24 +1,25 @@ {% extends "shared/base.html" %} {% block body %} - -PierMesh logo +
+ PierMesh logo +

PierMesh

+
+
+

+
+

Peer ID:

+ +

Node ID:

+ + +

{% include "shared/hopper.html" %}
{% include "shared/catch.nav.html" %}
{% include "shared/catch.editor.html" %} -
-
-
-
-

Peer ID:

- -

Node ID:

- - -

{% include "shared/messenger.html" %} -{% endblock %} +{% endblock %} \ No newline at end of file diff --git a/src/Splash/templates/shared/base.html b/src/Splash/templates/shared/base.html index 054866e5..e5295abe 100755 --- a/src/Splash/templates/shared/base.html +++ b/src/Splash/templates/shared/base.html @@ -10,6 +10,8 @@ + @@ -18,4 +20,4 @@ {% endblock %} - + \ No newline at end of file diff --git a/src/Splash/templates/shared/catch.editor.html b/src/Splash/templates/shared/catch.editor.html index c575d5f5..b6e3e403 100644 --- a/src/Splash/templates/shared/catch.editor.html +++ b/src/Splash/templates/shared/catch.editor.html @@ -1,23 +1,23 @@ -
- -
- Catch publisher
-
    -
-
- Head

- Seperator

- Body

- Fins
-
    -
  • -
  • -
- Content -
- -
- - -
-
+{% extends "shared/plank.html" %} +{% set plankTitle = "catch editor" %} +{% set formID = "catchEdit" %} +{% set icon = "catchdisplay.png" %} +{% block insert %} +
    +
+Head

+Separator

+Body

+Fins
+
    +
  • +
  • +
+Content +
+ +
+ + + +{% endblock %} \ No newline at end of file diff --git a/src/Splash/templates/shared/catch.nav.html b/src/Splash/templates/shared/catch.nav.html index a830813d..7a7b880d 100755 --- a/src/Splash/templates/shared/catch.nav.html +++ b/src/Splash/templates/shared/catch.nav.html @@ -1,58 +1,31 @@ -
- -
- Catch

-
- -
- -
- -
-
- -
- -
    - Fins: -
  • - -
  • -
- - - - - -
- Results: -
- {% include "shared/catch.html" %} -
-
+{% extends "shared/plank.html" %} +{% set plankTitle = "catch" %} +{% set formID = plankTitle %} +{% block insert %} + +
+ +
+ +
+
+ +
+ +
    + Fins: +
  • + +
  • +
+ + + +
+ +
+Results: +
+{% include "shared/catch.html" %} +
+{% endblock %} \ No newline at end of file diff --git a/src/Splash/templates/shared/hopper.html b/src/Splash/templates/shared/hopper.html index 985b9729..61fa5ff2 100644 --- a/src/Splash/templates/shared/hopper.html +++ b/src/Splash/templates/shared/hopper.html @@ -1,31 +1,30 @@ -
- -
- Hopper

-
- -
- -
- -
- -
- -
- -
- -
- -
- -
- -
- - -
-
-
-
+{% extends "shared/plank.html" %} +{% set plankTitle = "hopper" %} +{% set formID = plankTitle %} +{% block insert %} + +
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+
+ + +
+
+{% endblock %} \ No newline at end of file diff --git a/src/Splash/templates/shared/messenger.html b/src/Splash/templates/shared/messenger.html index 39a8ad5d..016da4ca 100755 --- a/src/Splash/templates/shared/messenger.html +++ b/src/Splash/templates/shared/messenger.html @@ -1,20 +1,18 @@ -
- -
- Bubble -
-
- Responses:
    -
-
-
- Peer ID:
-
- Node ID:
-
- Data
-
- - -
-
+{% extends "shared/plank.html" %} +{% set plankTitle = "bubble" %} +{% set formID = plankTitle %} +{% block insert %} +Responses:
    +
+
+{% include "shared/p2chat.html" %} +Peer ID:
+
+Node ID:
+
+Data
+
+ + +{% endblock %} \ No newline at end of file diff --git a/src/Splash/templates/shared/p2chat.html b/src/Splash/templates/shared/p2chat.html new file mode 100644 index 00000000..9889750e --- /dev/null +++ b/src/Splash/templates/shared/p2chat.html @@ -0,0 +1,22 @@ +
+ + + Background color: +
+
+
+ Color picker: + Hex input: +
+
+
+ X + Y + + +
+
+
+
+
\ No newline at end of file diff --git a/src/Splash/templates/shared/plank.html b/src/Splash/templates/shared/plank.html new file mode 100644 index 00000000..c4fc1f26 --- /dev/null +++ b/src/Splash/templates/shared/plank.html @@ -0,0 +1,18 @@ +
+ {% if icon is defined %} + + {% else %} + + {% endif %} + {{ plankTitle|capitalize }}

+
+ +
+
+
+
+ {% block insert %} + {% endblock %} +
+
+
\ No newline at end of file diff --git a/src/Sponge/Protocols/Yellow.py b/src/Sponge/Protocols/Yellow.py new file mode 100644 index 00000000..af57e8d8 --- /dev/null +++ b/src/Sponge/Protocols/Yellow.py @@ -0,0 +1,184 @@ +from Services.Action import Action +from Config.Context import Context +from Packets.Message import Message + +import logging + +logger = logging.getLogger("__main__." + __name__) + + +class YCTX(Context): + def __init__( + self, + packetsID, + packetCount, + pAction, + todo, + cryptographyInfo, + sourceNode, + subMessage=False, + subMessages={}, + submessagesIDs=[], + eData=None, + ): + super().__init__( + sourceNode=sourceNode, + packetsID=packetsID, + packetCount=packetCount, + pAction=pAction, + cryptographyInfo=cryptographyInfo, + subMessages=subMessages, + submessagesIDs=submessagesIDs, + eData=eData, + subMessage=subMessage, + ) + self.todo = todo + + +class Yellow: + # TODO: Submessage completion actions + pActions = [] + + def __init__( + self, + yctx: YCTX, + ): + self.yctx = yctx + self.message = None + self.submessages = yctx["subMessages"]["val"] + self.submessagesIDs = yctx["submessagesIDs"]["val"] + self.finishedSubmessages = {} + self.dataOrder = [] + self.data = [] + self.nonce = None + self.tag = None + self.gotHead = False + self.todo = yctx.todo + if yctx["eData"]["val"] != None: + self.dataOrder = yctx["eData"]["val"]["dataOrder"] + self.data = yctx["eData"]["val"]["data"] + + def checkComplete(self): + logger.log(30, "In check complete") + for submessage in self.submessagesIDs: + submessage = str(submessage) + if submessage not in self.finishedSubmessages.keys(): + logger.log(30, f"Submessage {submessage} missing") + return False + logger.log(30, "Submessages complete") + return True + + async def id(self): + return self.packetsID + + async def processPacket(self, p, subMessage=False, rdoaoc=[]): + doCheck = True + if subMessage == False and p["packetNumber"] != False: + #if not p["packetNumber"] in self.dataOrder: + if p["packetNumber"] not in self.dataOrder: + self.data.append(p["data"]) + self.dataOrder.append(p["packetNumber"]) + logger.log(10, "data") + logger.log(10, self.data) + else: + doCheck = False + elif p["packetNumber"] == False: + if not self.gotHead: + self.data.append(False) + self.dataOrder.append(False) + self.submessagesIDs = p["submessages"] + self.gotHead = True + else: + doCheck = False + else: + if not str(p["packetsID"]) in self.submessages.keys(): + self.submessages[str(p["packetsID"])] = { + "data": [], + "dataOrder": [] + } + if p["packetNumber"] not in self.submessages[str(p["packetsID"])]["dataOrder"]: + self.submessages[str(p["packetsID"])]["data"].append(p["data"]) + self.submessages[str(p["packetsID"])]["dataOrder"].append( + p["packetNumber"]) + else: + doCheck = False + if doCheck: + if subMessage != False: + if len(self.submessages[str(p["packetsID"])]["data"]) > (p["packetCount"]-1): + self.finishedSubmessages[str(p["packetsID"])] = Message.reassemble( + None, self.submessages[str(p["packetsID"])], self.yctx["cryptographyInfo"]["val"], packetCount=p["packetCount"], subMessage=True, yctx=self.yctx + ) + # TODO: Trigger doact + return await self.doAct(subMessage=self.finishedSubmessages[str(p["packetsID"])]) + elif len(self.data) >= (self.yctx["packetCount"]["val"]): + logger.log( + 20, "Finished receiving for primary message " + + str(self.yctx["packetsID"]["val"]) + ) + """ + if self.yctx["wantFullResponse"]["val"] != False: + # TO DO: implement loop + # responseLoop(packets_id) + pass + """ + # self.data = 
Message.reassemble(None, self, self.yctx["cryptographyInfo"]["val"]) + return await self.doAct(repeatDataOnActions=rdoaoc) + + async def dump(self): + msg = {} + msg["packetsID"] = self.yctx["packetsID"]["val"] + msg["data"] = self.data + msg["pAction"] = self.yctx["pAction"]["val"] + msg["submessages"] = self.submessages + return msg + + async def doAct( + self, + setpAction=False, + repeatDataOnActions=[], + subMessage=False + ): + # TODO; Get submessage instead of primary + m = await self.dump() + if subMessage != False: + logger.log(30, "Submessage complete") + # self.submessages[m["packetsID"]] = self.yctx["crytopgraphyInfo"]["val"].decrypt(m["data"]) + # TODO: Debug + logger.log(10, subMessage) + if "nonce" in subMessage.keys(): + self.nonce = subMessage["nonce"] + self.tag = subMessage["tag"] + if self.checkComplete(): + if len(self.data) >= (self.yctx["packetCount"]["val"]): + # TODO: Raising out of bounds error + pAction = self.pActions[int(m["pAction"])] + logger.log(30, "Full message completed") + # TODO: Decrypt + self.data = Message.reassemble( + None, self, self.yctx["cryptographyInfo"]["val"]) + """ + data = self.cryptographyInfo.decrypt(m["data"]) + if data == False: + self.cryptographyInfo.sessionSetup() + data = self.cryptographyInfo.decrypt(m["data"]) + """ + # TODO: Go over whether we need to limit + # this from being a field in the data for + # primary messages + """ + todo.append( + Action( + "cLog", {"message": "Unknown pAction " + m["pAction"], "priority": 20} + ) + ) + ) + """ + self.data["submessages"] = self.submessages + self.data["yctx"] = self.yctx + act = Action(pAction, self.data) + self.todo.append(act) + if len(repeatDataOnActions) > 0: + for caction in repeatDataOnActions: + cact = Action(caction, self.data) + #self.todo.add(cact) + self.todo.append(cact) diff --git a/src/Sponge/Protocols/__init__.py b/src/Sponge/Protocols/__init__.py index e69de29b..64268257 100644 --- a/src/Sponge/Protocols/__init__.py +++ b/src/Sponge/Protocols/__init__.py @@ -0,0 +1,10 @@ +from . 
import hopper, map, cryptography, catch, bubble, daisy + +classDict = { + "bubble": bubble.Bubble, + "catch": catch.Catch, + "cryptography": cryptography.CryptographyFilter, + "daisy": daisy.Daisy, + "hopper": hopper.Hopper, + "map": map.Map, +} diff --git a/src/Sponge/Protocols/bubble.py b/src/Sponge/Protocols/bubble.py index ef28709e..bda2610f 100644 --- a/src/Sponge/Protocols/bubble.py +++ b/src/Sponge/Protocols/bubble.py @@ -1,19 +1,12 @@ -async def filter(completeMessage, recipient, recipientNode, onodeID, todo): +from Sponge.Protocols.Yellow import Yellow + +# TODO: Forwarding message to next node +# TODO: Method to get next node in path to recipient node + +class Bubble(Yellow): """ Peer to peer protol `🔗 Source `__ """ - m = completeMessage - if recipientNode == onodeID: - todo.append( - { - "action": "sendToPeer", - "data": {"res": m["data"]["data"], "recipient": recipient}, - } - ) - else: - # TODO: Forwarding message to next node - # TODO: Method to get next node in path to recipient node - # self.t.addPackets(m.data, sender, senderDisplay, recipient, recipientNode) - pass + pActions = ["sendToPeer"] diff --git a/src/Sponge/Protocols/catch.py b/src/Sponge/Protocols/catch.py index 554cfb2b..2b5131bb 100644 --- a/src/Sponge/Protocols/catch.py +++ b/src/Sponge/Protocols/catch.py @@ -1,36 +1,11 @@ -from Services.Action import Action -from src.Packets.Message import p +from Sponge.Protocols.Yellow import Yellow -async def filter(completeMessage, recipient, recipientNode, todo, toLocal=True): +class Catch(Yellow): """ Catch exchange protocol `🔗 Source `__ """ - # TODO: Checking for catch om other nodes and maintaining defined duplication via Daisy - # TODO: Daisy protocol - # TODO: pAction for syncing listing - # TODO: pAction for querying - m = completeMessage - # TODO: Sending to other nodes clients - pAction = int(m["pAction"]) - if pAction == 0: - todo.append( - Action( - "sendCatch", - {"head": m["head"], "body": m["body"], "fins": m["fins"]}, - recipientNode=m["sourceNode"], - recipent=m["sender"], - ) - ) - elif pAction == 1: - todo.append(Action("routeCatch", {"html": m["html"]}, recipient=recipient)) - elif pAction == 2: - todo.append(Action("syncIndex", m["index"])) - else: - todo.append( - Action( - "cLog", {"message": "Unknown pAction " + m["pAction"], "priority": 20} - ) - ) + + pActions = ["sendCatch", "routeCatch", "syncIndex"] diff --git a/src/Sponge/Protocols/cryptography.py b/src/Sponge/Protocols/cryptography.py index fc14ab2f..479dea9a 100644 --- a/src/Sponge/Protocols/cryptography.py +++ b/src/Sponge/Protocols/cryptography.py @@ -1,17 +1,11 @@ -async def filter(completeMessage, recipientNode, todo): +from Sponge.Protocols.Yellow import Yellow + + +class CryptographyFilter(Yellow): """ Cryptographic operations protocol `🔗 Source `__ """ - todo.append( - { - "action": "keyDeriveDH", - "data": { - "publicKey": completeMessage["data"]["publicKey"], - "peerEphemeralKey": completeMessage["data"]["ephemeralKey"], - "recipientNode": recipientNode, - }, - } - ) - # logging.log(10, "Adding cryptography request") + + pActions = ["initCryptography"] diff --git a/src/Sponge/Protocols/daisy.py b/src/Sponge/Protocols/daisy.py new file mode 100644 index 00000000..33979d6b --- /dev/null +++ b/src/Sponge/Protocols/daisy.py @@ -0,0 +1,9 @@ +from Sponge.Protocols.Yellow import Yellow + + +class Daisy(Yellow): + """ + Catch exchange protocol + + `🔗 Source `__ + """ diff --git a/src/Sponge/Protocols/hopper.py b/src/Sponge/Protocols/hopper.py index 1626a066..67a35a42 100644 --- 
a/src/Sponge/Protocols/hopper.py +++ b/src/Sponge/Protocols/hopper.py @@ -1,22 +1,11 @@ -from Services.Action import Action +from Sponge.Protocols.Yellow import Yellow -async def filter(completeMessage, todo, recipient, recipientNode): +class Hopper(Yellow): """ Internet inter(h)op protocol `🔗 Source `__ """ - m = completeMessage - pAction = int(m["pAction"]) - if pAction == 0: - todo.append( - Action( - "hop", - {"url": m["data"]["url"], "method": m["method"]}, - recipient=m["sender"], - recipientNode=m["sourceNode"], - ) - ) - elif pAction == 1: - Action("routeHop", {"res": m["res"]}, recipient=recipient) + pActions = ["hop", "routeHop"] + diff --git a/src/Sponge/Protocols/map.py b/src/Sponge/Protocols/map.py index d9b8c3d0..5f263d69 100644 --- a/src/Sponge/Protocols/map.py +++ b/src/Sponge/Protocols/map.py @@ -1,22 +1,17 @@ -async def filter(completeMessage, todo): +from Sponge.Protocols.Yellow import Yellow + + +class Map(Yellow): """ Network mapping protocol `🔗 Source `__ """ - m = completeMessage - todo.append( - { - "action": "map", - "data": { - "onodeID": m["data"]["onodeID"], - "mnodeID": m["data"]["mnodeID"], - }, - } - ) - todo.append( - { - "action": "initNodeDH", - "data": {"mnodeID": m["data"]["mnodeID"], "onodeID": m["data"]["onodeID"]}, - } - ) + + pActions = ["map", "initCryptography"] + + def process(self, message, isSubMessage=False): + rdoa = [] + if message["pAction"] == 0: + rdoa.append(self.pActions[1]) + super().process(message, isSubMessage=isSubMessage, repeatDataOnActions=rdoa) diff --git a/src/Sponge/base.py b/src/Sponge/base.py index 0ef55478..31f8a1a2 100644 --- a/src/Sponge/base.py +++ b/src/Sponge/base.py @@ -1,13 +1,14 @@ +import logging + import msgpack import traceback from Packets.Message import Message -import Sponge.Protocols.bubble -import Sponge.Protocols.map -import Sponge.Protocols.catch -import Sponge.Protocols.cryptography +import Sponge.Protocols as Protocols +logger = logging.getLogger("__main__." 
+ __name__) + class Filter: """ Packet filtering orchestration @@ -30,13 +31,14 @@ class Filter: PierMesh node ID """ - def __init__(self, cache, onodeID, todo, cLog): - self.cLog = cLog + def __init__(self, cache, onodeID, todo, cryptographyInfo): self.cache = cache + self.cryptographyInfo = cryptographyInfo """ Messages is temporary storage for unfinished messages """ self.messages = {} + self.submessages = {} self.completed = [] self.todo = todo self.onodeID = onodeID @@ -49,7 +51,7 @@ class Filter: msgpack.loads(payload) return True except Exception as e: - self.cLog(20, "Not msgpack encoded, skipping") + logger.debug("Not msgpack encoded, skipping") return False def selfCheck(self, packet): @@ -57,97 +59,128 @@ class Filter: Check if this is a self packet, if so skip """ if packet["fromId"] == packet["toId"]: - self.cLog(20, "Self packet, ignored") + logger.log(20, "Self packet, ignored") return False else: return True - async def protoMap(self, protocolID: int): + async def protoMap(self, protocolID: int, packetsID, packetCount, sourceNode, submessagesIDs=[], pAction=None): """ Get protocol from protocol ID using the mlookup table """ protocolID = str(protocolID).zfill(6) - return self.cache.get("mlookup").get()[protocolID] + proto = self.cache.get("mlookup").get()[protocolID] + subMessages = {} + for pid in submessagesIDs: + if pid in self.submessages: + subMessages[pid] = self.submessages[pid] + ctx = Protocols.Yellow.YCTX( + packetsID, + packetCount, + pAction, + self.todo, + self.cryptographyInfo, + sourceNode, + submessagesIDs=submessagesIDs, + subMessages=subMessages + ) + cf = Protocols.classDict[proto]( + ctx + ) + return cf - async def protoRoute(self, completeMessage: dict): - """ - Route message to proper protocol handler - """ - m = completeMessage - """ - Shorthand reference completeMessage for ease - """ - sender = m["sender"] - senderDisplay = m["senderDisplayName"] - recipient = m["recipient"] - recipientNode = m["recipientNode"] - # TODO: Fix packets to use class - protocol = await self.protoMap(m["packetsClass"]) - self.cLog(20, "Protocol: " + protocol) - if protocol == "bubble": - await Sponge.Protocols.bubble.filter( - m, recipient, recipientNode, self.onodeID, self.todo - ) - elif protocol == "map": - await Sponge.Protocols.map.filter(m, self.todo) - elif protocol == "catch": - await Sponge.Protocols.catch.filter(m, recipient, recipientNode, self.todo) - elif protocol == "cryptography": - await Sponge.Protocols.cryptography.filter( - completeMessage, recipientNode, self.todo - ) - elif protocol == "hopper": - await Sponge.Protocols.hopper.filter( - completeMessage, self.todo, recipient, recipientNode - ) - else: - self.cLog(30, "Cant route, no protocol") + # async def protoRoute(self, completeMessage: dict): + # """ + # Route message to proper protocol handler + # """ + # m = completeMessage + # """ + # Shorthand reference completeMessage for ease + # """ + # sender = m["sender"] + # senderDisplay = m["senderDisplayName"] + # recipient = m["recipient"] + # recipientNode = m["recipientNode"] + # protocol = await self.protoMap(m["packetsClass"]) + # logger.log(20, "Protocol: " + protocol) + # if protocol == "bubble": + # await Sponge.Protocols.bubble.filter( + # m, recipient, recipientNode, self.onodeID, self.todo + # ) + # elif protocol == "map": + # await Sponge.Protocols.map.filter(m, self.todo) + # elif protocol == "catch": + # await Sponge.Protocols.catch.filter(m, recipient, recipientNode, self.todo) + # elif protocol == "cryptography": + # await 
Sponge.Protocols.cryptography.filter( + # completeMessage, recipientNode, self.todo + # ) + # elif protocol == "hopper": + # await Sponge.Protocols.hopper.filter( + # completeMessage, self.todo, recipient, recipientNode + # ) + # else: + # logger.log(30, "Cant route, no protocol") async def sieve(self, packet): """ Base filtering logic, takes a single MeshTastic packet """ + # TODO: Instantiating the protocol on submessage p = packet["decoded"]["payload"] if self.selfCheck(packet) and self.mCheck(p): try: p = msgpack.loads(p) - self.cLog(20, p) + logger.log(20, p) packetsID = p["packetsID"] packetsClass = p["packetsClass"] if packetsID == 0: - self.cLog(20, "Single packet") + logger.log(20, "Single packet") # Do sp handling pass if packetsID in self.completed: raise ValueError("Message already completed") - if not (packetsID in self.messages): - self.messages[packetsID] = { - "packetCount": p["packetCount"], - "data": [], - "finished": False, - "dataOrder": [], - } - if "wantFullResponse" in p.keys(): - for k in p.keys(): - if k != "data": - self.messages[packetsID][k] = p[k] - elif not p["packetNumber"] in self.messages[packetsID]["dataOrder"]: - self.messages[packetsID]["data"].append(p["data"]) - self.messages[packetsID]["dataOrder"].append(p["packetNumber"]) - if (len(self.messages[packetsID]["data"])) >= ( - self.messages[packetsID]["packetCount"] - 1 - ) and ("wantFullResponse" in self.messages[packetsID].keys()): - self.cLog(20, "Finished receiving for message " + str(packetsID)) - self.messages[packetsID]["finished"] = True - if self.messages[packetsID]["wantFullResponse"] != False: - # TO DO: implement loop - # responseLoop(packets_id) - pass - completeMessage = self.messages[packetsID] - completeMessage["data"] = Message.reassemble(None, completeMessage) - del self.messages[packetsID] - self.completed.append(packetsID) - self.cLog(20, "Assembly completed, routing") - await self.protoRoute(completeMessage) + if not (packetsID in self.messages) and ( + not "wantFullResponse" in p.keys() + ): + if "primaryMessage" in p.keys(): + if p["primaryMessage"] in self.messages.keys(): + # TODO: Temporary store for submessages if main hasnt arrived + # TODO: Check for submessages when instantiating + await self.messages[p["primaryMessage"]].processPacket(p, subMessage=True) + else: + if not str(p["packetsID"]) in self.submessages.keys(): + self.submessages[str(p["packetsID"])] = { + "data": [], + "dataOrder": [] + } + self.submessages[str(p["packetsID"])]["data"].append(p["data"]) + self.submessages[str(p["packetsID"])]["dataOrder"].append(p["packetNumber"]) + #await self.messages[p["primaryMessage"]].processPacket(p, subMessage=True) + + else: + self.messages[packetsID] = await self.protoMap( + p["packetsClass"], packetsID, p["packetCount"], 0 + ) + await self.messages[packetsID].processPacket(p) + elif "wantFullResponse" in p.keys(): + if packetsID in self.messages.keys(): + self.messages[packetsID].yctx["pAction"]["val"] = p["pAction"] + self.messages[packetsID].yctx["sourceNode"]["val"] = p["sourceNode"] + # self.messages[packetsID].submessageIDs = p["submessages"] + await self.messages[packetsID].processPacket(p) + else: + self.messages[packetsID] = await self.protoMap( + p["packetsClass"], + packetsID, + p["packetCount"], + p["sourceNode"], + submessagesIDs=p["submessages"], + pAction=p["pAction"] + ) + await self.messages[packetsID].processPacket(p) + else: + await self.messages[packetsID].processPacket(p) + except Exception: - self.cLog(30, traceback.format_exc()) + logger.log(30, 
traceback.format_exc()) diff --git a/src/Transceiver/Transceiver.py b/src/Transceiver/Transceiver.py index b56a446e..fb64f9c1 100644 --- a/src/Transceiver/Transceiver.py +++ b/src/Transceiver/Transceiver.py @@ -1,13 +1,25 @@ import meshtastic import meshtastic.serial_interface + from pubsub import pub from Packets.Message import Message +from Packets.Messages.Protocols.map.Announce import AnnounceMessage + +from Packets.SubMessage import SubMessage + import time +import sys +import random import msgpack import asyncio +import uuid +import traceback +import logging + +logger = logging.getLogger("__main__." + __name__) class Transceiver: """ @@ -50,70 +62,75 @@ class Transceiver: """ - def __init__(self, device, filter, onodeID, cache, catch, cryptographyInfo, cLog): - self.cLog = cLog - self.cryptographyInfo = cryptographyInfo - self.filter = filter - self.tcache = cache - self.tcatch = catch - self.notConnected = True - self.messages = {} - self.acks = {} - self.onodeID = onodeID - # Be careful with this - self.cpid = 0 - self.tasks = {} - # TODO: use node id to deliver directly - pub.subscribe(self.onReceive, "meshtastic.receive") - pub.subscribe(self.onConnection, "meshtastic.connection.established") - self.interface = meshtastic.serial_interface.SerialInterface(device) - i = 0 - while self.notConnected: - if i % 5000000 == 0: - self.cLog(20, "Waiting for node initialization...") - i += 1 - self.cLog(20, "Initialized") + def __init__(self, device, filter, onodeID, cache, catch, cryptographyInfo, network): + try: + self.busy = False + self.network = network + self.cryptographyInfo = cryptographyInfo + self.filter = filter + self.tcache = cache + self.tcatch = catch + self.notConnected = True + self.messages = {} + self.acks = {} + self.onodeID = onodeID + # Be careful with this + self.cpid = 0 + self.tasks = {} + pub.subscribe(self.onReceive, "meshtastic.receive") + pub.subscribe(self.onConnection, "meshtastic.connection.established") + self.interface = meshtastic.serial_interface.SerialInterface(device) + i = 0 + while self.notConnected: + if i % 5000000 == 0: + logger.log(20, "Waiting for node initialization...") + i += 1 + logger.log(20, "Initialized") + except Exception as e: + logger.log(30, traceback.format_exc()) + raise e # TODO: Sending packets across multiple nodes/load balancing/distributed packet transmission/reception def onReceive(self, packet, interface): """ Run each received packet through Sponge.base.Filters sieve using a new event loop """ + self.busy = True asyncio.new_event_loop().run_until_complete(self.filter.sieve(packet)) + self.busy = False self.tcache.refresh() - async def sendAnnounce(self): + async def sendAnnounce(self, dontRespond=False): """ Send an announce packet (contains basic network mapping information) every so often so new nodes autoconnect """ - await self.addPackets( - msgpack.dumps( - { - "onodeID": self.onodeID, - "mnodeID": self.interface.localNode.nodeNum, - } - ), - self.onodeID, - None, - True, - None, - packetsClass=0, - ) + try: + r = AnnounceMessage( + self.onodeID, + 0, + self.onodeID, + self.cryptographyInfo, + {"onodeID": self.onodeID, "mnodeID": self.interface.localNode.nodeNum, "dontRespond": dontRespond}, + ) + await self.sendMessage(r) + except: + logger.log(30, traceback.format_exc()) def onConnection(self, interface, topic=pub.AUTO_TOPIC): """ When the node connects start announce loop and end the waiting loop """ - asyncio.run(self.sendAnnounce()) + #asyncio.run(self.sendAnnounce()) self.notConnected = False def 
responseCheck(self, packet): """ On acknowldgement response set acks based on response + TODO: Stop this being sent to sieve """ rid = packet["decoded"]["requestId"] if packet["decoded"]["routing"]["errorReason"] == "MAX_RETRANSMIT": - self.cLog(20, "Got ack error") + logger.debug(f"Got ack error for packet {rid}") self.acks[str(rid)] = False else: self.acks[str(rid)] = True @@ -235,12 +252,13 @@ class Transceiver: if recipientNode == None: self.send(p) else: - self.cLog(10, "Sending target: " + str(directID)) + logger.log(10, "Sending target: " + str(directID)) if directID != False: recipientNode = directID self.send(p, recipientNode=recipientNode) awaitTask = asyncio.create_task(self.awaitResponse(self.cpid)) await asyncio.sleep(1) + # TODO: pid fix currentTask = { "ob": awaitTask, "pid": str(self.cpid), @@ -249,30 +267,79 @@ class Transceiver: } self.tasks[str(self.cpid)] = currentTask - async def sendMessage(self, message, recipientNode=None): - for p in message.packets: - if recipientNode == None: - self.send(p) - else: - self.cLog(10, "Sending target: " + str(recipientNode)) - self.send(p, recipientNode=recipientNode) - awaitTask = asyncio.create_task(self.awaitResponse(self.cpid)) - await asyncio.sleep(1) - currentTask = { - "ob": awaitTask, - "pid": str(self.cpid), - "packet": p, - "retry": False, - } - self.tasks[str(self.cpid)] = currentTask + async def sendMessage(self, message: Message): + + try: + # self.busy = True + # TODO: Send nonce subMessage after main message + # TODO: Instantiate SubMessages with HeaderPacket of primary message + hpacket = message.fullPackets[0] + logger.log(30, message.nonce) + nsm = SubMessage( + hpacket.sender, + hpacket.senderDisplayName, + hpacket.sourceNode, + hpacket.recipient, + hpacket.recipientNode, + self.cryptographyInfo, + hpacket.packetsClass, + hpacket.pAction, + {"nonce": message.nonce, "tag": message.tag}, + target=message.target, + primaryMessage=hpacket.packetsID + ) + message.fullPackets[0].submessages.append(nsm.packetsID) + message.packets[0] = message.fullPackets[0].dump() + logger.log(30, message.packets[0]) + logger.log(30, nsm.packets) + for m in [{"primary": message}, {"nonce": nsm}]: + while self.busy: + await asyncio.sleep(2) + m = [v for v in m.values()][0] + # logger.log(30, m) + await asyncio.sleep(5) + isHead = True + curPacketCount = len(m.packets) + for it, p in enumerate(m.packets): + it += 1 + pid = str(uuid.uuid4()) + #print(p) + logger.info(f"Packet {it:03d}/{curPacketCount:03d}, Size is: " + str(sys.getsizeof(p))) + doNumber = 3 + if isHead: + doNumber = 5 + isHead = False + if m.target == False: + logger.log(10, "Sending any") + for i in range(doNumber): + self.send(p) + await asyncio.sleep(1) + else: + logger.log(10, "Sending target: " + str(m.recipientNode)) + for i in range(doNumber): + self.send(p, recipientNode=self.network.doLookup(m.recipientNode)) + await asyncio.sleep(1) + awaitTask = asyncio.create_task(self.awaitResponse(pid)) + + currentTask = { + "ob": awaitTask, + "pid": pid, + "packet": p, + "retry": False, + } + self.tasks[pid] = currentTask + await asyncio.sleep(3) + except Exception: + logger.log(30, traceback.format_exc) + self.busy = False async def progressCheck(self): """ Checks if acknowldgement was received per packet and if not resends """ while True: - await asyncio.sleep(90) - self.cLog( + await asyncio.sleep(480) + logger.log( 20, "Checking progress of {0} tasks".format(len(self.tasks.keys())) ) doneFlag = True @@ -289,12 +356,12 @@ class Transceiver: elif retry < 3: retry += 1 else: 
- self.cLog(30, "Too many retries") + logger.log(30, "Too many retries") remove = True if remove: del self.tasks[task["pid"]] else: - self.cLog(20, "Doing retry") + logger.log(20, "Doing retry") doneFlag = False # TODO: Resend to specific node self.send(task["packet"]) @@ -317,7 +384,17 @@ class Transceiver: """ Announce loop runner """ + while (self.notConnected or self.busy): + if self.busy: + logger.log(30, "Transceiver is busy, waiting") + await asyncio.sleep(2) while True: - self.cLog(10, "Announce") + wait = random.randrange(600, 900) + logger.info(f"Waiting {wait} seconds for next announce") + await asyncio.sleep(wait) + while self.busy: + logger.info("Transceiver is busy, waiting") + await asyncio.sleep(2) + await asyncio.sleep(5) + logger.info("Announce") await self.sendAnnounce() - await asyncio.sleep(180) diff --git a/src/run.py b/src/run.py index 03a56008..a3c2f7b8 100755 --- a/src/run.py +++ b/src/run.py @@ -8,40 +8,46 @@ from Daisy.CryptographyUtil import SteelPetal from Splash.serve import Server from Transceiver.Transceiver import Transceiver from Cryptography.WhaleSong import Transport -from ui import TUI + import Components.hopper as hopper from Packets.Messages.Protocols.hopper.Response import HopperResponse from Packets.Messages.Protocols.catch.Response import CatchResponse +from Packets.Messages.Protocols.cryptography.Handshake import Handshake +import tlog +from tlog import VHandler # Generic imports import logging import os import asyncio -import sys import time import datetime import traceback import threading import random -import lzma import argparse import configparser - +import shutil +import queue +import json # Process management library import psutil -import msgpack +import uvloop +# TODO: Switch config to toml +# TODO: Better protocol management +# TODO: Dry run if __name__ == "__main__": - global nodeOb, tuiOb, argConfig, config """ - Global objects for the PierMesh service and the TUI so we can terminate the associated processes later + Global objects for the PierMesh service and the TUI so we can terminate the associated processes later and configuration objects for the command line and config (.piermesh) """ + logger = "" + passkey = input("Enter node decryption key: ") + #psk = input("Enter psk: ") nodeOb = None - tuiOb = None - # Pull startup parameters parser = argparse.ArgumentParser() parser.add_argument("-d", "--device", help="Set transceiver device path") parser.add_argument("-p", "--port", help="Web UI server port") @@ -52,11 +58,28 @@ if __name__ == "__main__": parser.add_argument( "-o", "--override", help="Whether to override config", default=False ) - parser.add_argument("-x", "--showTUI", help="Whether to show TUI", default=True) + parser.add_argument("-x", "--showTUI", + help="Whether to show TUI", default=True) + parser.add_argument( + "-l", + "--confList", + help="Comma separated list of conf files to load for multi-node mode (TODO)", + default=False, + ) + parser.add_argument( + "-k", + "--PSK", + help="Pre shared key for initializing communications", + default=False, + ) + argConfig = parser.parse_args() config = configparser.ConfigParser() if argConfig.confList != False: pass + if not os.path.exists(".piermesh"): + print("No config at .piermesh, duplicating and loading example config...") + shutil.copyfile(".piermesh.example", ".piermesh") config.read(".piermesh") device = "" @@ -80,7 +103,7 @@ if __name__ == "__main__": webPort = int(webPort) delay = config["DEFAULT"]["StartupDelay"] if argConfig.override: - delay = argConfig.delay + 
delay = argConfig.startupDelay else: if "StartupDelay" in config["OPERATOR_OVERRIDES"]: delay = config["OPERATOR_OVERRIDES"]["StartupDelay"] @@ -98,16 +121,18 @@ if __name__ == "__main__": if "ShowTUI" in config["OPERATOR_OVERRIDES"]: showTUI = config["OPERATOR_OVERRIDES"]["ShowTUI"] showTUI = bool(showTUI) - - # Set up file based logging - logPath = "logs" - fileName = datetime.datetime.now().strftime("%m%d%Y_%H%M%S") - logFormat = "%(asctime)s [%(threadName)-12.12s] [%(levelname)-5.5s] %(message)s" - logging.basicConfig( - format=logFormat, - level=logging.INFO, - filename="{0}/{2}_{1}.log".format(logPath, fileName, nodeNickname), - ) + psk = False + if argConfig.override: + if argConfig.PSK == False: + print("No PSK set quitting...") + exit(0) + else: + psk = argConfig.PSK + #else: + if "PSK" in config["OPERATOR_REQUIRED"]: + psk = config["OPERATOR_REQUIRED"]["PSK"] + print("Staggering {0} seconds please wait".format(delay)) + time.sleep(delay) class Node: @@ -160,39 +185,80 @@ class Node: """ def __init__(self): - self.toLog = [] actionsList = [f for f in dir(self) if "action" in f] self.actions = {} for a in actionsList: self.actions[a.split("_")[1]] = getattr(self, a) self.todo = [] - self.cLog(20, "Past action mapping") + logger.log(20, "Past action mapping") self.network = Network() - self.catch = Catch(walk=True) - self.cache = Cache(walk=True) - self.remoteCatchIndex = Index(nodeNickname) - self.daisyCryptography = None + self.daisyCryptography = SteelPetal(passkey) + # TODO: Better directory structure + self.catch = Catch(self.daisyCryptography, walk=True, path=nodeNickname) + + self.cache = Cache(self.daisyCryptography, walk=True) + self.cache.get("mlookup").json_to_msg("daisy/mlookup") + logger.info("Protocols:\n" + json.dumps(self.cache.get("mlookup").get(), indent=4)) + self.remoteCatchIndex = Index(nodeNickname, self.daisyCryptography) + serverInfoFile = nodeNickname + ".info" self.nodeInfo = self.cache.get(serverInfoFile) if self.nodeInfo == False: - self.cache.create(serverInfoFile, {"nodeID": random.randrange(0, 1000000)}) + # TODO: Get/set toml + self.cache.create( + serverInfoFile, {"nodeID": random.randrange(0, 1000000)}) self.nodeInfo = self.cache.get(serverInfoFile) self.network.addin(self.nodeInfo.get()["nodeID"]) - self.cLog(20, "Siph network stack initialized") + logger.log(20, "Siph network stack initialized") self.onodeID = str(self.nodeInfo.get()["nodeID"]) - self.server = None - self.sponge = Filter(self.cache, self.onodeID, self.todo, self.cLog) - self.cLog(20, "Filter initialized") - self.cLog(10, "Command line arguments: " + ", ".join(sys.argv)) - self.oTransceiver = None - self.cLog(20, "Cryptography initializing") - self.cryptographyInfo = Transport(self.cache, nodeNickname, self.cLog) - self.cLog(20, "Cryptography initialized") + + logger.log(20, "Cryptography initializing") + self.cryptographyInfo = Transport( + self.cache, nodeNickname, self.daisyCryptography, psk + ) + logger.log(20, "Cryptography initialized") + + self.sponge = Filter(self.cache, self.onodeID, + self.todo, self.cryptographyInfo) + logger.log(20, "Filter initialized") + + # TODO: Run in different thread, dispatch through queue + self.oTransceiver = Transceiver( + device, + self.sponge, + self.onodeID, + self.cache, + self.catch, + self.cryptographyInfo, + self.network + ) + self.server = Server( + self.oTransceiver, + self.catch, + self.onodeID, + self.network, + self.cryptographyInfo, + self.remoteCatchIndex, + self.cache, + ) self.processed = [] self.proc = 
psutil.Process(os.getpid()) self.mTasks = {} + async def main(self): + self.mTasks["list"] = asyncio.create_task(self.spongeListen()) + await asyncio.sleep(1) + # self.mTasks["pct"] = asyncio.create_task(self.oTransceiver.progressCheck()) + # await asyncio.sleep(1) + self.mTasks["mon"] = asyncio.create_task(self.monitor()) + await asyncio.sleep(1) + self.mTasks["announce"] = asyncio.create_task( + self.oTransceiver.announce()) + await asyncio.sleep(1) + await self.server.app.start_server(port=int(webPort)) + def cLog(self, priority: int, message: str): + logger = logging.getLogger(__name__) """ Convenience function that logs to the ui and log files @@ -209,8 +275,11 @@ class Node: ------- None """ - logging.log(priority, message) - self.toLog.append("[{0}]:\n{1}".format(datetime.datetime.now(), message)) + + logger.log(priority, message) + # pass + # logging.log(priority, message) + # self.toLog.append("[{0}]:\n{1}".format(datetime.datetime.now(), message)) async def fsInit(self): # TODO: Flesh out and properly link everything @@ -230,8 +299,8 @@ class Node: memmb = self.proc.memory_info().rss / (1024 * 1024) memmb = round(memmb, 2) cpup = self.proc.cpu_percent(interval=1) - self.cLog( - 20, + logger.log( + 10, " MEM: {0} mB | CPU: {1} %".format( memmb, cpup, @@ -240,7 +309,7 @@ class Node: tuiOb.do_set_cpu_percent(float(cpup)) tuiOb.do_set_mem(memmb) else: - self.cLog(20, "No TUI object, waiting 5 seconds...") + logger.log(20, "No TUI object, waiting 5 seconds...") await asyncio.sleep(5) async def spongeListen(self): @@ -256,16 +325,21 @@ class Node: We use a common technique here that calls the function from our preloaded actions via dictionary entry """ while True: - while (len(self.todo) >= 1) & (len(tuiOb.todo) >= 1): - todoNow = None - if len(self.todo) > 0: - todoNow = self.todo.pop() - else: - todoNow = tuiOb.todo.pop() - action = todoNow["action"] - self.cLog(20, "Action: " + action) - data = todoNow["data"] - await self.actions[action](data) + # logger.log(10, "Sponge listening...") + # logger.log(10, self.todo) + while (len(self.todo) >= 1): + try: + logger.log(10, "In todo") + todoNow = self.todo[0] + action = todoNow.getAction() + logger.log(20, "Action: " + action) + data = todoNow.getData() + logger.log(20, "Doing action") + await self.actions[action](data) + logger.log(20, "After action") + except Exception: + logger.log(20, traceback.format_exc()) + self.todo.pop() await asyncio.sleep(1) async def action_sendToPeer(self, data: dict): @@ -285,45 +359,69 @@ class Node: webui.serve.Server.sendToPeer: Function to actually execute the action """ - self.server.sendToPeer(data["recipient"], data["res"]) + logger.info(data) + await self.server.sendToPeer(data["recipient"], data["data"], data["target"]) async def action_sendCatch(self, data: dict): """ Get catch and return the data to a peer """ - res = self.catch.get(data["head"], data["body"], fins=data["fins"]) + # TODO: Seperators + if "fins" in data: + res = self.catch.get(data["head"], data["body"], fins=data["fins"]) + else: + res = self.catch.get(data["head"], data["body"]) + logger.info(res) + # TODO: Manually connect two nodes r = CatchResponse( - self.nodeInfo.onodeID, + self.onodeID, 000000, - self.nodeInfo.onodeID, + self.onodeID, data["recipient"], data["recipientNode"], self.cryptographyInfo, res, + pskEncrypt=True ) - self.oTransceiver.sendMessage(r) + await self.oTransceiver.sendMessage(r) async def action_cLog(self, data: dict): - self.cLog(data["priority"], data["message"]) + logger.log(data["priority"], 
data["message"]) async def action_routeCatch(self, data: dict): - self.server.sendToPeer(data["recipient"], data["html"]) + await self.server.sendToPeer(data["recipient"], data["html"], data["target"]) async def action_syncIndex(self, data: dict): for entry in data["index"]: self.remoteCatchIndex.addEntry(entry) async def action_map(self, data: dict): + # TODO: Normalize node ids to strings """ Map new network data to internal network map See Also -------- - Siph.network.Network: Layered graph etwork representation + Siph.network.Network: Layered graph network representation """ - self.network.addLookup(data["onodeID"], data["mnodeID"]) - self.cLog(20, "Lookup addition done") - self.network.addon(data["onodeID"]) + if self.cryptographyInfo.getRecord("key", data["onodeID"]) == False: + logger.log(20, "In map") + await self.network.addLookup(data["onodeID"], data["mnodeID"]) + logger.log(20, "After add lookup") + logger.log(20, "Adding public key") + self.cryptographyInfo.addPublickey(data["onodeID"], data["publicKey"]) + logger.log(20, "Public key addition done") + self.network.omap.add_node(data["onodeID"]) + logger.log(20, "Added node to outer map") + else: + logger.log(20, "Node already exists skipping addition to map") + if "sessionKey" not in self.cryptographyInfo.getRecord("key", data["onodeID"]).keys(): + if not data["dontRespond"]: + #await self.oTransceiver.sendAnnounce(dontRespond=True) + #await asyncio.sleep(180) + h = Handshake(self.nodeInfo.get()["nodeID"], self.nodeInfo.get()["nodeID"], data["onodeID"], data["onodeID"], self.cryptographyInfo, data["onodeID"], self.onodeID) + await self.oTransceiver.sendMessage(h) + async def action_initCryptography(self, data: dict): """ @@ -333,9 +431,12 @@ class Node: -------- Cryptography.DHEFern.DHEFern: End to end encryption functionality """ - self.cryptographyInfo.sessionSetup( - data["recipientNode"], data["publicKey"], data["peerEphemeralKey"] - ) + # TODO: Response message + # TODO: Initialize if not initialized + self.cryptographyInfo.sessionSetup(data["yctx"]["sourceNode"]["val"], data["ephemeralKey"]) + logger.log(20, "Cryptography handshake complete") + # TODO: Now encrypt all communications node to node with session encryption + # TODO: Expiration? 
async def action_hop(self, data): try: @@ -352,18 +453,19 @@ class Node: r = HopperResponse( self.onodeID, 000000, + self.onodeID, data["recipient"], data["recipientNode"], r, self.cryptographyInfo, ) - self.oTransceiver.sendMessage(r) + await self.oTransceiver.sendMessage(r) except: - self.cLog(30, traceback.format_exc()) + logger.log(30, traceback.format_exc()) - async def routeHop(self, data: dict): - self.server.sendToPeer(data["recipient"], data["res"]) + async def action_routeHop(self, data: dict): + await self.server.sendToPeer(data["recipient"], data["res"], data["target"]) async def action_addPSK(self, data): # TODO: Switch to credential @@ -371,85 +473,50 @@ class Node: self.cryptographyInfo.update(data["nodeID"], {"PSK": data["PSK"]}) -async def logPassLoop(): - """ - Loop to pass logs up to the TUI - - See Also - -------- - ui.TUI: TUI implementation - """ - global tuiOb, nodeOb - while True: - await asyncio.sleep(1) - if tuiOb == None or nodeOb == None: - await asyncio.sleep(1) - elif tuiOb.done: - tuiOb.exit() - os.system("reset") - print("Terminating PierMesh service...") - nodeOb.proc.terminate() - else: - ctoLog = [l for l in nodeOb.toLog] - for l in ctoLog: - tuiOb.do_write_line(l) - nodeOb.toLog.pop() - - async def main(): """ - Main method for running the PierMesh service + Kick on main method for running the PierMesh service """ - global nodeOb try: - passkey = input("Enter node decryption key: ") nodeOb = Node() - nodeOb.daisyCryptography = SteelPetal(passkey, nodeOb.cLog) - nodeOb.cLog(20, "Starting up") - nodeOb.cLog(20, "Staggering {0} seconds, please wait".format(sys.argv[4])) - time.sleep(int(sys.argv[4])) - nodeOb.oTransceiver = Transceiver( - sys.argv[1], - nodeOb.sponge, - nodeOb.onodeID, - nodeOb.cache, - nodeOb.catch, - nodeOb.cryptographyInfo, - nodeOb.cLog, - ) - nodeOb.server = Server( - nodeOb.oTransceiver, - nodeOb.catch, - nodeOb.onodeID, - nodeOb.network, - nodeOb.cLog, - nodeOb.cache, - ) - nodeOb.mTasks["list"] = asyncio.create_task(nodeOb.spongeListen()) - await asyncio.sleep(1) - nodeOb.mTasks["pct"] = asyncio.create_task(nodeOb.oTransceiver.progressCheck()) - await asyncio.sleep(1) - nodeOb.mTasks["mon"] = asyncio.create_task(nodeOb.monitor()) - await asyncio.sleep(1) - nodeOb.mTasks["announce"] = asyncio.create_task(nodeOb.oTransceiver.announce()) - await asyncio.sleep(1) - await nodeOb.server.app.start_server(port=int(sys.argv[2]), debug=True) - except Exception: - logging.log(20, traceback.format_exc()) + await nodeOb.main() + except: + logger.log(20, traceback.format_exc()) + + +def initLogger(nodeName, tolog): + global logger + logger = logging.getLogger(__name__) + logger.propagate = False + logger.setLevel(logging.DEBUG) + + vh = VHandler(logging.DEBUG, tolog) + + formatter = logging.Formatter( + '%(asctime)s - %(name)s - %(levelname)s - %(message)s') + + vh.setFormatter(formatter) + + logger.addHandler(vh) + + dt = datetime.datetime.now().strftime("%Y%m%d_%H%M%S") + + fh = logging.FileHandler(f"logs/{nodeName}_{dt}.log") + fh.setFormatter(formatter) + + logger.addHandler(fh) if __name__ == "__main__": try: - mainThread = threading.Thread(target=asyncio.run, args=(main(),)) + tolog = queue.Queue() + initLogger(nodeNickname, tolog) + mainThread = threading.Thread(target=uvloop.run, args=(main(),)) mainThread.start() + # TODO: Waiting for this to finish when no tui + # TODO: Logging to screen when no tui if showTUI: - lplThread = threading.Thread(target=asyncio.run, args=(logPassLoop(),)) - lplThread.start() - tuiOb = TUI() - tuiOb.nodeOb = 
nodeOb - tuiOb.run() + tlog.runLogUI(tolog, nodeNickname) except: - try: - nodeOb.cLog(30, traceback.format_exc()) - except: - logging.log(30, traceback.format_exc()) + print(traceback.format_exc()) + os._exit(1) diff --git a/src/scripts/falin b/src/scripts/falin index 73e56083..ebf68210 100755 --- a/src/scripts/falin +++ b/src/scripts/falin @@ -1 +1 @@ -python run.py /dev/ttyACM1 5000 server2.info 0 falin +python run.py -d /dev/ttyACM1 -p 5001 -s 75 -n falin -k jgf765!FS0+6 -o True diff --git a/src/setup.fish b/src/setup.fish new file mode 100644 index 00000000..c9ad4ed2 --- /dev/null +++ b/src/setup.fish @@ -0,0 +1 @@ +set TERM xterm-256color diff --git a/src/setup.sh b/src/setup.sh new file mode 100644 index 00000000..c58352ab --- /dev/null +++ b/src/setup.sh @@ -0,0 +1 @@ +TERM=xterm-256color diff --git a/src/stale/router.py b/src/stale/router.py index 3ff4a1f1..4d20f848 100644 --- a/src/stale/router.py +++ b/src/stale/router.py @@ -18,7 +18,7 @@ class Router: self.network = Network() self.catch = Catch(walk=True) self.cache = Cache(walk=True) - self.cLog(10, "Loading server info") + logger.log(10, "Loading server info") self.serverInfo = self.cache.get(nfpath) if self.serverInfo == False: self.cache.create(nfpath, {"nodeID": random.randrange(0, 1000000)}) @@ -34,4 +34,4 @@ class Router: return self.c.get(head, tail, fins=fins) def addc(self, peer, node, seperator, head, tail, data, fins=None): - self.c.addc(peer, node, seperator, head, tail, data, fins=fins) + self.c.addc(peer, node, seperator, head, tail, data, fins=fins) \ No newline at end of file diff --git a/src/tlog.py b/src/tlog.py new file mode 100644 index 00000000..6b7c7550 --- /dev/null +++ b/src/tlog.py @@ -0,0 +1,174 @@ +import curses +from curses import wrapper + +import os +import logging +import queue +import shutil +import time + +import psutil + +# TODO: Multi node mode +# TODO: Tab per node + + +class VHandler(logging.Handler): + tolog = queue.Queue() + + def __init__(self, level, tolog): + super().__init__(level) + self.tolog = tolog + + def emit(self, record): + r = self.format(record) + self.tolog.put([r, record.levelno]) + +""" +def initLogger(nodeName, tolog): + logger = logging.getLogger(__name__) + + logger.setLevel(logging.DEBUG) + # Is tolog variable creating an error that cant be logged + # and logging is defaulting + # Check if this reference is causing an issue + vh = VHandler(logging.DEBUG, tolog) + + formatter = logging.Formatter( + '%(asctime)s - %(name)s - %(levelname)s - %(message)s') + + vh.setFormatter(formatter) + + logger.addHandler(vh) + + dt = datetime.datetime.now().strftime("%Y%m%d_%H%M%S") + + fh = logging.FileHandler(f"logs/{nodeName}_{dt}.log") + fh.setFormatter(formatter) + + logger.addHandler(fh) + return logger +""" + + +def logUI(stdscr, tolog, nodeNickname): + logcache = [] + height, width = shutil.get_terminal_size((49, 27)) + if height < 28 or height < 28: + print("Console too small or couldnt retrieve size, please switch to no tui mode, exiting...") + exit() + logger = logging.getLogger("__main__." 
+ __name__) + p = psutil.Process(os.getpid()) + + curses.start_color() + stdscr.keypad(True) + + stdscr.nodelay(True) + curses.init_color(9, 200, 200, 200) + grey = 9 + + curses.init_pair(1, curses.COLOR_WHITE, curses.COLOR_BLACK) + curses.init_pair(2, curses.COLOR_BLACK, curses.COLOR_GREEN) + curses.init_pair(3, curses.COLOR_WHITE, grey) + + curses.init_color(curses.COLOR_WHITE, 1000, 1000, 1000) + curses.init_color(curses.COLOR_YELLOW, 1000, 1000, 0) + curses.init_color(curses.COLOR_GREEN, 0, 1000, 0) + + curses.init_color(logging.DEBUG, 0, 1000, 0) + curses.init_pair(logging.DEBUG, 0, logging.DEBUG) + curses.init_color(logging.INFO, 1000, 1000, 1000) + curses.init_pair(logging.INFO, 0, logging.INFO) + curses.init_pair(logging.WARNING, 0, curses.COLOR_YELLOW) + curses.init_pair(logging.ERROR, curses.COLOR_WHITE, curses.COLOR_RED) + curses.init_color(logging.CRITICAL, 1000, 0, 0) + curses.init_pair(logging.CRITICAL, curses.COLOR_WHITE, logging.CRITICAL) + icon = [] + with open("piermesh-mini.ascii", "r") as f: + icon = f.read().split("\n") + + stdscr.bkgd(' ', curses.color_pair(1)) + stdscr.clear() + + iwin = curses.newwin(13, 50, 25, 0) + iwin.bkgd(" ", curses.color_pair(3)) + for it, line in enumerate(icon[:-1]): + iwin.addstr(it+1, 0, " " + line, curses.color_pair(3)) + iwin.addstr(10, 2, " Keys: q to abort, ↑ scroll up", curses.color_pair(2)) + iwin.addstr(11, 2, " ↓ scroll down ", curses.color_pair(2)) + + head = curses.newwin(4, 50, 0, 0) + head.bkgd(" ", curses.color_pair(3)) + head.addstr(1, 1, "PierMesh TUI", curses.color_pair(3)) + head.addstr(2, 1, "Logs:", curses.color_pair(3)) + head.border(' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ') + + logpad = curses.newpad(1000, 300) + logpad.bkgdset(" ", curses.color_pair(2)) + + start = 0 + gen = 0 + lastr = time.time() + while True: + if gen == 0 or ((time.time()-lastr) > 2): + lastr = time.time() + stdscr.addstr(23, 0, " System usage: ", curses.color_pair(3)) + + mem = round(p.memory_info().rss/(1024*1024), 2) + cpu = p.cpu_percent(interval=0.1) + stdscr.addstr( + 24, + 0, + f" MEM: {mem} Mb CPU: {cpu}% ", + curses.color_pair(2) + ) + if tolog.empty() != True: + while True: + logcache.insert(0, tolog.get()) + tolog.task_done() + if tolog.qsize() < 1: + break + if len(logcache) > 100: + logcache = logcache[:100] + logpad.clear() + nextOffset = 0 + for it, message in enumerate(logcache): + msg = message[0] + if len(msg) > width: + msgPartA = msg[:width] + msgPartB = msg[width:] + if len(msgPartB) > width: + msgPartB = msgPartB[:width] + logpad.addstr( + it+nextOffset, 0, " " + msgPartA + " ", curses.color_pair(message[1])) + logpad.addstr( + it+nextOffset+1, 0, " " + msgPartB + " ", curses.color_pair(message[1])) + nextOffset = 1 + else: + logpad.addstr( + it+nextOffset, 0, " " + msg + " ", curses.color_pair(message[1])) + nextOffset = 0 + logpad.refresh(start, 0, 4, 0, 22, width) + stdscr.refresh() + if gen < 1: + iwin.refresh() + head.refresh() + gen += 1 + + ch = stdscr.getch() + if ch == ord("q"): + curses.nocbreak() + stdscr.keypad(False) + curses.echo() + curses.endwin() + os._exit(1) + elif ch == curses.KEY_UP: + if start != 0: + start -= 1 + elif ch == curses.KEY_DOWN: + if start < tolog.qsize(): + start += 1 + + +def runLogUI(tolog, nodeNickname): + wrapper(logUI, tolog, nodeNickname) diff --git a/src/tui.py b/src/tui.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ui.py b/src/ui.py index 37a8b98b..aea40436 100644 --- a/src/ui.py +++ b/src/ui.py @@ -1,3 +1,4 @@ +# TODO: DEPRECATE from textual.app import App, 
ComposeResult from textual.widgets import Log, Label, Footer, Header, ProgressBar, Input, Button from textual.binding import Binding @@ -75,9 +76,9 @@ class TUI(App): Label(ascii, classes="largeLabel", name="logo", id="logo"), Label("Add/set pre shared key for node\n"), Label("Node ID:"), - Input(placeholder="000000", type="integer", max_length=6, name="pskNodeID", id="pskNodeID") + Input(placeholder="000000", type="integer", max_length=6, name="pskNodeID", id="pskNodeID"), Label("PSK:"), - Input(type="text", max_length=6, name="psk", id="psk") + Input(type="text", max_length=6, name="psk", id="psk"), Button("Add/set PSK", name="addPSK", id="addPSK"), ) yield Vertical( @@ -146,3 +147,4 @@ class TUI(App): if __name__ == "__main__": app = TUI() app.run() +
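
Note on the new Sponge protocol layout introduced in this patch: each protocol filter (Bubble, Catch, Hopper, Map, ...) now subclasses Yellow and only declares a pActions list; when a message completes, doAct wraps the reassembled data in an Action appended to the shared todo queue, and spongeListen in run.py later executes that action by name. The sketch below is illustrative only and is not part of the patch: the Action stand-in and the sendToPeer handler are simplified assumptions inferred from the getAction()/getData() usage in run.py, not the classes shipped in Services.Action.

# Illustrative sketch (not shipped with this patch): how a Yellow-style
# protocol's pActions index feeds the shared todo queue that spongeListen
# drains. "Action", "Bubble" and "sendToPeer" are simplified stand-ins.
import asyncio


class Action:
    # Assumed shape, mirroring todoNow.getAction()/getData() usage in run.py
    def __init__(self, action, data):
        self._action = action
        self._data = data

    def getAction(self):
        return self._action

    def getData(self):
        return self._data


class Bubble:
    # Completion actions are selected by the message's integer pAction field
    pActions = ["sendToPeer"]

    def __init__(self, todo):
        self.todo = todo

    def doAct(self, pAction, data):
        self.todo.append(Action(self.pActions[pAction], data))


async def spongeListen(todo, handlers):
    # Drain the shared todo list, dispatching each Action by name
    while todo:
        item = todo.pop(0)
        await handlers[item.getAction()](item.getData())


async def main():
    todo = []
    Bubble(todo).doAct(0, {"recipient": "000001", "data": "hi", "target": "bubble"})

    async def sendToPeer(data):
        print("would route to peer websocket:", data)

    await spongeListen(todo, {"sendToPeer": sendToPeer})


asyncio.run(main())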