Compare commits


1 commit
main ... dev

Author: Agie Ashwood
SHA1: 66f3cbe401
Message: Prototype fast forward
Date: 2024-11-22 20:34:39 -07:00
85 changed files with 3134 additions and 780 deletions

.gitignore (vendored), 12 lines changed

@@ -8,3 +8,15 @@ src/Splash/res/js/node_modules/
 src/daisy/
 src/catch/
 src/logs/
+zims/
+project.matrix
+piermesh.nvim
+piermesh
+node00/*
+node00/
+*.index
+*.log
+*.vim
+*.workspace
+.workspace
+.workspace.backup


@@ -1,3 +1,5 @@
 home = /usr/bin
 include-system-site-packages = false
-version = 3.10.12
+version = 3.11.2
+executable = /usr/bin/python3.11
+command = /usr/bin/python3 -m venv /home/ag/Documents/piermesh


@@ -42,5 +42,5 @@ This program is free software: you can redistribute it and/or modify it under th
 This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
-You should have received a copy of the GNU General Public License along with this program. If not, see [https://www.gnu.org/licenses/](https://www.gnu.org/licenses/).
+You should have received a copy of the GNU General Public License along with this program. If not, see https://www.gnu.org/licenses/.
 ```


@@ -13,3 +13,7 @@ sphinx
 textual
 textual-dev
 sphinx-markdown-builder==0.6.6
+pycryptodome
+pyjwt
+uvloop
+python-lsp-server[all]

src/.piermesh (new file, 14 lines)

@@ -0,0 +1,14 @@
# DONT TOUCH THIS SECTION UNLESS YOU KNOW WHAT YOURE DOING
[DEFAULT]
Nickname = node00
StartupDelay = 0
WebUIPort = 5000
ShowTUI = True
[OPERATOR_REQUIRED]
TransceiverPort = /dev/ttyACM0
PSK = jgf765!FS0+6
# DO YOUR NON REQUIRED SETTINGS HERE
[OPERATOR_OVERRIDES]
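The node config above is plain INI syntax, so a sketch of reading it with Python's standard configparser follows; this is only an illustration of the layout shown (the helper name and load path are assumptions, not code from this commit). Values in [DEFAULT] are inherited by the other sections.

```python
import configparser

def loadNodeConfig(path="src/.piermesh"):
    # Illustrative only: read the INI-style node config shown above
    config = configparser.ConfigParser()
    config.read(path)
    nickname = config["DEFAULT"]["Nickname"]           # "node00"
    webUIPort = config["DEFAULT"].getint("WebUIPort")  # 5000
    # [OPERATOR_REQUIRED] inherits the DEFAULT values and adds the radio port
    transceiverPort = config["OPERATOR_REQUIRED"].get("TransceiverPort")
    return nickname, webUIPort, transceiverPort
```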

src/.piermesh.example (new file, 13 lines)

@@ -0,0 +1,13 @@
# DONT TOUCH THIS SECTION UNLESS YOU KNOW WHAT YOURE DOING
[DEFAULT]
Nickname = node00
StartupDelay = 0
WebUIPort = 5000
ShowTUI = True
[OPERATOR_REQUIRED]
# TransceiverPort = /dev/ttyACM0
# DO YOUR SETTINGS HERE
[OPERATOR_OVERRIDES]

src/Actions/Actions.py (new empty file)

src/Actions/__init__.py (new empty file)


@@ -1,10 +1,39 @@
+from bs4 import BeautifulSoup
 import requests
 import msgpack
 import lzma
-from Packets.Message import Message
+import base64
+import mimetypes
+import logging
+
+from Packets.Messages.Protocols.hopper.Response import HopperResponse
+
+logger = logging.getLogger("__main__." + __name__)
+
+
+def downloadFile(url, text=True, mimeType=None):
+    fbytes = b""
+    with requests.get(url, stream=True) as r:
+        r.raise_for_status()
+        for chunk in r.iter_content(chunk_size=8192):
+            fbytes += chunk
+    if text:
+        return fbytes.decode("utf-8")
+    else:
+        if mimeType == None:
+            mimeType, encoding = mimetypes.guess_type(url)
+            if mimeType == None:
+                raise Error(
+                    "Couldnt guess mime type and none was supplied, cant encode to data url"
+                )
+        b64str = base64.b64encode(fbytes).decode("utf-8")
+        dataUrl = "data:{0};base64,{1}".format(mimeType, b64str)
+        return dataUrl
+
+
-def get(url: str, params=None):
+def get(url: str, params=None, followTags=None):
     """
     http/s get request

@@ -14,10 +43,44 @@ def get(url: str, params=None):
     params
         Requests (library) parameters

+    followTags
+        None or list of tags to download the src/href from
     """
+    logger.debug("Hopping to it")
+    # TODO: Non blocking requests
+    # WARN: Do not run self requests until this is fixed
     r = requests.get(url, params=params)
-    r = {"response": r.text, "code": r.status_code}
-    return Message(lzma.compress(msgpack.dumps(r))).get()
+    logger.debug("Content retrieved, parsing")
+    r = {
+        "response": r.text,
+        "code": r.status_code,
+        "content-type": r.headers.get("content-type"),
+    }
+    logger.debug("Done parsing")
+    # TODO: Reject followtags if content type is other then html
+    if followTags != None:
+        soup = BeautifulSoup(r["response"], "html.parser")
+        # TODO: Checking for relative links
+        for tag in followTags:
+            if tag in ["img", "video"]:
+                for elem in soup.find_all(tag):
+                    elem["src"] = downloadFile(elem["src"], text=False)
+            elif tag in ["link"]:
+                for elem in soup.find_all(tag):
+                    if elem["rel"] == "stylesheet":
+                        style = downloadFile(elem["href"])
+                        elem.decompose()
+                        soup.head.append_tag(soup.new_tag("style", string=style))
+            elif tag == "script":
+                for elem in soup.find_all(tag):
+                    script = downloadFile(elem["src"])
+                    elem["src"] = ""
+                    elem.string = script
+        r["response"] = soup.text
+    logger.debug("Done hopping")
+    return r

 def post(url: str, params=None):

@@ -33,4 +96,4 @@ def post(url: str, params=None):
     """
     r = requests.post(url, data=params)
     r = {"response": r.text, "code": r.status_code}
-    return Message(lzma.compress(msgpack.dumps(r))).get()
+    return r
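To make the new flow concrete, a hypothetical call against the helpers above; the URL and tag list are placeholders, not taken from the repo.

```python
# Fetch a page, inlining <img> sources as data URLs and <script> bodies as text
page = get("https://example.com/", followTags=["img", "script"])

page["code"]            # HTTP status code
page["content-type"]    # e.g. "text/html"
page["response"]        # processed page text
```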

src/Config/Args.py (new file, 7 lines)

@@ -0,0 +1,7 @@
def byFile():
    pass


def byPrompt():
    pass

src/Config/Context.py (new file, 22 lines)

@@ -0,0 +1,22 @@
class Context:
    def __init__(self, subsets: dict = {}, **kwargs):
        # Subsets should be a dict of list of value keys
        self.ctx = {}
        self.subsets = subsets
        for key, val in kwargs.items():
            self.ctx[key] = {
                "val": val,
                "type": type(val)
            }

    def __getitem__(self, idx):
        return self.ctx[idx]

    def getSubset(self, subset):
        if subset in self.subsets.keys():
            res = {}
            for key in self.subsets[subset]:
                res[key] = self.ctx[key]["val"]
            return res
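A small usage sketch of Context as added above; the key names and values are illustrative only.

```python
ctx = Context(
    subsets={"routing": ["sender", "recipientNode"]},
    sender=42,
    recipientNode="000001",
    html="<p>hi</p>",
)

ctx["sender"]["val"]       # 42
ctx["sender"]["type"]      # <class 'int'>
ctx.getSubset("routing")   # {'sender': 42, 'recipientNode': '000001'}
```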

src/Config/__init__.py (new empty file)


@ -1,24 +1,26 @@
import base64 import base64
import os import os
from cryptography.fernet import Fernet import lzma
from cryptography.hazmat.primitives import hashes import logging
from cryptography.hazmat.primitives.asymmetric import dh
from cryptography.hazmat.primitives.kdf.hkdf import HKDF
from cryptography.hazmat.primitives.serialization import (
Encoding,
NoEncryption,
ParameterFormat,
PublicFormat,
PrivateFormat,
)
import cryptography.hazmat.primitives.serialization as Serialization
import msgpack import msgpack
from Crypto.PublicKey import ECC
from Crypto.Hash import SHAKE128
from Crypto.Protocol.DH import key_agreement
from Crypto.Cipher import AES
from Daisy.Store import Store from Daisy.Store import Store
logger = logging.getLogger("__main__." + __name__)
# TODO: Different store directories per node # TODO: Different store directories per node
# TODO: First time psk transport initiation
# Add this credential manually, its picked up and used when the two nodes try to communicate before the session is encrypted
class DHEFern: class Transport:
""" """
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Cryptography/WhaleSong.py>`__ `🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Cryptography/WhaleSong.py>`__
@ -47,7 +49,7 @@ class DHEFern:
Private key for node Private key for node
""" """
def __init__(self, cache, nodeNickname, cLog): def __init__(self, cache, nodeNickname, daisyCryptography, psk):
""" """
Parameters Parameters
---------- ----------
@ -61,32 +63,41 @@ class DHEFern:
Reference to `run.Node.cLog` Reference to `run.Node.cLog`
""" """
self.cLog = cLog
self.stores = {} self.stores = {}
self.loadedParams = {}
self.loadedKeys = {}
self.nodeNickname = nodeNickname self.nodeNickname = nodeNickname
self.cache = cache self.cache = cache
if os.path.exists("daisy/cryptography/{0}/param".format(nodeNickname)) == False: self.daisyCryptography = daisyCryptography
self.initStore("param")
else:
self.stores["param"] = Store("param", "cryptography", nodeNickname)
self.params = self.loadParamBytes(self.stores["param"].get()["self"])
self.cLog(20, "Param store initialized")
if os.path.exists("daisy/cryptography/{0}/key".format(nodeNickname)) == False: if os.path.exists("daisy/cryptography/{0}/key".format(nodeNickname)) == False:
self.cLog(20, "Key store DNE, initializing") logger.log(20, "Key store DNE, initializing")
self.initStore("key") self.initStore("key")
self.genKeyPair()
else: else:
self.cLog(20, "Key store exists, loading") logger.log(20, "Key store exists, loading")
self.stores["key"] = Store("key", "cryptography", nodeNickname) self.stores["key"] = Store(
self.cLog(20, "Store loaded") "key", "cryptography", nodeNickname, daisyCryptography
# tks = self.stores["key"].get() )
# self.publicKey = tks["self"]["publicKey"] logger.log(20, "Store loaded")
# self.privateKey = tks["self"]["privateKey"] logger.log(20, "Key store initialized")
self.cLog(20, "Key store initialized") srecord = self.getRecord("key", "self")
if srecord == False:
self.stores["key"].createEmpty("self")
self.stores["key"].update(
"self",
{"PSK": self.daisyCryptography.pad(psk).encode("utf-8")},
write=False
)
if "publicKey" not in self.getRecord("key", "self").keys():
self.addPublickey(None, None, forSelf=True)
else:
self.stores["key"].update("self", {
"publicKey": ECC.import_key(
self.getRecord("key", "self")["publicKey"]
)
}, write=False)
def checkInMem(self, store: str, nodeID: str): def kdf(self, bytesX):
return SHAKE128.new(bytesX).read(32)
def checkInMem(self, store: str, nodeID: str, checkFieldsExist=[]):
""" """
Check if parameters or keys are loaded for node of nodeID Check if parameters or keys are loaded for node of nodeID
@ -99,7 +110,14 @@ class DHEFern:
if store == "param": if store == "param":
return nodeID in self.loadedParams.keys() return nodeID in self.loadedParams.keys()
elif store == "key": elif store == "key":
return nodeID in self.loadedKeys.keys() record = self.getRecord("key", nodeID)
if record != False:
for field in checkFieldsExist:
if not (field in record.keys()):
if field == "staticKey":
self.genStaticKey(nodeID)
elif field == "ourEphemeralKey":
self.genOurEphemeralKey(nodeID)
def loadRecordToMem(self, store: str, nodeID: str): def loadRecordToMem(self, store: str, nodeID: str):
""" """
@ -107,15 +125,16 @@ class DHEFern:
""" """
r = self.getRecord(store, nodeID) r = self.getRecord(store, nodeID)
if r == False: if r == False:
self.cLog( logger.log(
30, "Tried to load nonexistent {0} for node {1}".format(store, nodeID) 30, "Tried to load nonexistent {0} for node {1}".format(store, nodeID)
) )
return False return False
elif self.checkInMem(store, nodeID): elif self.checkInMem(store, nodeID):
self.cLog(10, "{0}s already deserialized, skipping".format(store)) logger.log(10, "{0}s already deserialized, skipping".format(store))
else: else:
if store == "param": if store == "param":
self.loadedParams[nodeID] = self.loadParamBytes(r) self.loadedParams[nodeID] = self.loadParamBytes(r)
"""
elif store == "key": elif store == "key":
self.loadedKeys[nodeID] = { self.loadedKeys[nodeID] = {
"publicKey": Serialization.load_pem_public_key(r["publicKey"]), "publicKey": Serialization.load_pem_public_key(r["publicKey"]),
@ -123,54 +142,37 @@ class DHEFern:
r["privateKey"], None r["privateKey"], None
), ),
} }
"""
return True return True
def getRecord(self, store: str, key: str): def getRecord(self, store: str, key: str, ephemeral=False):
""" """
Get record from store: store with key: key Get record from store: store with key: key
""" """
r = stores[store].getRecord(key) r = self.stores[store].getRecord(key, ephemeral=ephemeral)
if r == False: if r == False:
self.cLog(20, "Record does not exist") logger.log(20, "Record does not exist")
return False return False
else: else:
return r return r
# TODO: Fix stores, URGENT
def initStore(self, store: str): def initStore(self, store: str):
""" """
Initialize store: store Initialize store: store
""" """
self.stores[store] = Store(store, "cryptography", self.nodeNickname) self.stores[store] = Store(
store, "cryptography", self.nodeNickname, self.daisyCryptography
)
if store == "param": if store == "param":
self.genParams() self.genParams()
self.stores[store].update("self", self.getParamsBytes(), recur=False) self.stores[store].update("self", self.getParamsBytes(), recur=False)
elif store == "key": elif store == "key":
self.stores[store].update("self", {}, recur=False) self.stores[store].update("self", {}, recur=False)
else: else:
self.cLog(30, "Store not defined") logger.log(30, "Store not defined")
def genParams(self): def genStaticKey(self, onodeID, paramsOverride=False):
"""
Generate Diffie Hellman parameters
"""
params = dh.generate_parameters(generator=2, key_size=2048)
self.params = params
return params
def getParamsBytes(self):
"""
Get bytes encoded from self.parameters (TODO: Encode from store)
"""
return self.params.parameter_bytes(Encoding.PEM, ParameterFormat.PKCS3)
def loadParamBytes(self, pemBytes: bytes):
"""
Load parameters to self.params from given bytes (TODO: Load from store)
"""
self.params = Serialization.load_pem_parameters(pemBytes)
return self.params
def genKeyPair(self, paramsOverride=False, setSelf: bool = True):
""" """
Generate public and private keys from self.params (TODO: Gen from passed params) Generate public and private keys from self.params (TODO: Gen from passed params)
@ -180,85 +182,99 @@ class DHEFern:
setSelf: bool setSelf: bool
Whether to set self.privateKey and self.publicKey Whether to set self.privateKey and self.publicKey
""" """
privateKey = self.params.generate_private_key() staticKey = ECC.generate(curve="p256")
if setSelf: self.stores["key"].update(
self.privateKey = privateKey onodeID,
publicKey = privateKey.public_key() {
if setSelf: "staticKey": staticKey.export_key(
self.publicKey = publicKey format="PEM", prot_params={"iteration_count": 131072}
self.stores["key"].update(
"self",
{
"publicKey": self.publicKey.public_bytes(
Encoding.PEM, PublicFormat.SubjectPublicKeyInfo
),
"privateKey": self.privateKey.private_bytes(
Encoding.PEM, PrivateFormat.PKCS8, NoEncryption()
),
},
)
return [privateKey, publicKey]
else:
publicKey = publicKey.public_bytes(
Encoding.PEM, PublicFormat.SubjectPublicKeyInfo
)
privateKey = privateKey.private_bytes(
Encoding.PEM, PrivateFormat.PKCS8, NoEncryption()
)
return [privateKey, publicKey]
def keyDerive(self, pubKey: bytes, salt: bytes, nodeID: str, params: bytes):
"""
Derive shared key using Diffie Hellman
pubKey: bytes
Public key
nodeID: str
PierMesh node ID
params: bytes
Encryption parameters
"""
if self.checkInMem("param", nodeID) == False:
if self.getRecord("param", nodeID) == False:
self.updateStore("param", nodeID, params, recur=False)
self.loadRecordToMem("param", nodeID)
self.cLog(20, "Precheck done for key derivation")
# TODO: Load them and if private key exists load it, otherwise generate a private key
if self.checkInMem("key", nodeID) == False:
if self.getRecord("key", nodeID) == False:
privateKey, publicKey = self.genKeyPair(setSelf=False)
self.updateStore(
"key", nodeID, {"publicKey": publicKey, "privateKey": privateKey}
) )
self.loadRecordToMem("key", nodeID) },
sharedKey = self.loadedKeys[nodeID]["privateKey"].exchange(
Serialization.load_pem_public_key(pubKey)
) )
# Perform key derivation. self.stores["key"].update(onodeID, {"staticKey": staticKey}, write=False)
self.cLog(20, "Performing key derivation")
derivedKey = HKDF(
algorithm=hashes.SHA256(), length=32, salt=salt, info=b"handshake data"
).derive(sharedKey)
self.cLog(20, "Derived key")
ederivedKey = base64.urlsafe_b64encode(derivedKey)
tr = self.getRecord("key", nodeID)
tr["derivedKey"] = ederivedKey
self.updateStore("key", nodeID, tr)
self.cLog(20, "Done with cryptography store updates")
return ederivedKey
def getSalt(self): def genOurEphemeralKey(self, onodeID):
""" ourEphemeralKey = ECC.generate(curve="p256")
Get random salt self.stores["key"].update(onodeID, {"ourEphemeralKey": ourEphemeralKey}, write=False)
"""
return os.urandom(16) def addPublickey(self, onodeID, publicKey, forSelf=False):
if forSelf:
publicKey = ECC.generate(curve="p256")
self.stores["key"].update("self", {
"publicKey": publicKey.export_key(
format="PEM",
prot_params={"iteration_count": 131072}
)})
self.stores["key"].update("self", {
"publicKey": publicKey
},
write=False)
else:
# TODO: Fix stores
# self.stores["key"].update(onodeID, {"publicKey": publicKey})
logger.log(20, "Importing keys")
record = self.getRecord("key", onodeID)
if record == False:
self.stores["key"].createEmpty(onodeID)
self.stores["key"].update(onodeID, {"publicKey": publicKey})
self.stores["key"].update(onodeID, {"publicKey": ECC.import_key(publicKey)}, write=False)
def addPeerEphemeralKey(self, onodeID, peerEphemeralKey):
self.stores["key"].update(onodeID, {"peerEphemeralKey": ECC.import_key(peerEphemeralKey)}, write=False)
def sessionSetup(self, onodeID, peerEphemeralKey):
# TODO: Deeper checking before loading
# TODO: Loading existing records
if self.getRecord("key", onodeID) == False:
logger.log(30, "No record, waiting for announce")
else:
self.addPeerEphemeralKey(onodeID, peerEphemeralKey)
self.generateSessionKey(onodeID)
def generateSessionKey(self, onodeID):
# TODO: Gen static key if not exists
# TODO: Gen our ephemeral key if not exists
keysOb = self.getRecord("key", onodeID, ephemeral=True)
if ("publicKey" not in keysOb) or ("staticKey" not in keysOb):
dkeysOb = self.getRecord("key", onodeID)
if ("publicKey" not in keysOb):
self.stores["key"].update(
onodeID, {
"publicKey": ECC.import_key(
dkeysOb["publicKey"]
)
}, write=False
)
if ("staticKey" not in keysOb):
self.stores["key"].update(
onodeID, {
"staticKey": ECC.import_key(
dkeysOb["staticKey"]
)
}, write=False
)
keysOb = self.getRecord("key", onodeID, ephemeral=True)
reget = False
if "staticKey" not in keysOb:
self.genStaticKey(onodeID)
reget = True
if "ourEphemeralKey" not in keysOb:
self.genOurEphemeralKey(onodeID)
reget = True
if reget:
keysOb = self.getRecord("key", onodeID, ephemeral=True)
sessionKey = key_agreement(
static_priv=keysOb["staticKey"],
static_pub=keysOb["publicKey"],
eph_priv=keysOb["ourEphemeralKey"],
eph_pub=keysOb["peerEphemeralKey"],
kdf=self.kdf,
)
self.stores["key"].update(onodeID, {"sessionKey": sessionKey}, write=False)
return sessionKey
# TODO: Build in transport security (node/node) # TODO: Build in transport security (node/node)
def encrypt(self, data, nodeID: str, isDict: bool = True): def encrypt(self, data, nodeID: str, isDict: bool = True, pskEncrypt=False):
""" """
Do Fernet encryption Do Fernet encryption
@ -268,29 +284,73 @@ class DHEFern:
isDict: bool isDict: bool
Whether data is a dictionary Whether data is a dictionary
""" """
r = self.getRecord("key", nodeID) if nodeID == "-00001" or pskEncrypt:
if r == False: cipher = AES.new(self.getRecord("key", "self", ephemeral=True)["PSK"], AES.MODE_GCM)
self.cLog(20, "Node {0} not in keystore".format(nodeID)) nonce = cipher.nonce
return False
else:
derivedKey = r["derivedKey"]
fernet = Fernet(derivedKey)
if isDict: if isDict:
data = msgpack.dumps(data) data = msgpack.dumps(data)
token = fernet.encrypt(data) ciphertext, tag = cipher.encrypt_and_digest(data)
return token return (ciphertext, nonce, tag)
elif (self.getRecord("key", nodeID)) == False:
logger.log(30, "Node {0} not in keychain".format(nodeID))
return False
else:
r = self.getRecord("key", nodeID, ephemeral=True)
if r == False:
r = self.getRecord("key", "self", ephemeral=True)
logger.info(r)
if "sessionKey" in r.keys():
sessionKey = r["sessionKey"]
cipher = AES.new(sessionKey, AES.MODE_GCM)
nonce = cipher.nonce
if isDict:
data = msgpack.dumps(data)
ciphertext, tag = cipher.encrypt_and_digest(data)
return (ciphertext, nonce, tag)
elif "PSK" in r.keys():
cipher = AES.new(r["PSK"], AES.MODE_GCM)
nonce = cipher.nonce
if isDict:
data = msgpack.dumps(data)
ciphertext, tag = cipher.encrypt_and_digest(data)
return (ciphertext, nonce, tag)
else:
logger.log(20, "Node {0} does not have session key".format(nodeID))
def decrypt(self, data, nodeID: str): def decrypt(self, data, nodeID: str, nonce, tag):
""" """
Decrypt bytes and return either str or dict (TODO: Check whether to msgpack load) Decrypt bytes and return either str or dict (TODO: Check whether to msgpack load)
""" """
r = self.getRecord("key", nodeID) # TODO: Handling existing record
if r == False: record = self.getRecord("key", nodeID, ephemeral=True)
self.cLog(20, "No record of node " + nodeID) if (record == False) or ("sessionKey" not in record.keys()):
return False cipher = AES.new(self.getRecord("key", "self", ephemeral=True)["PSK"], AES.MODE_GCM, nonce=nonce)
elif not "derivedKey" in r.keys():
self.cLog(20, "No key derived for node " + nodeID) data = cipher.decrypt(data)
return False logger.log(10, data)
#data = msgpack.loads(data)
data = msgpack.loads(lzma.decompress(data))
logger.log(10, "Decrypt/deserialize output")
logger.log(10, data)
return data
# logger.log(20, "Node {0} not in keychain".format(nodeID))
# return False
else: else:
fernet = Fernet(self.getRecord("key", nodeID)["derivedKey"]) if "sessionKey" in record.keys():
return msgpack.loads(fernet.decrypt(data)) sessionKey = record["sessionKey"]
cipher = AES.new(sessionKey, AES.MODE_GCM, nonce=nonce)
data = cipher.decrypt(data)
data = msgpack.loads(lzma.decompress(data))
return data
elif "PSK" in record.keys():
cipher = AES.new(record["PSK"], AES.MODE_GCM, nonce=nonce)
data = cipher.decrypt(data)
data = msgpack.loads(lzma.decompress(data))
return data
else:
logger.log(20, "Node {0} does not have session key".format(nodeID))
return False


@ -18,6 +18,7 @@ class Cache:
def __init__( def __init__(
self, self,
daisyCryptography,
filepaths=None, filepaths=None,
cacheFile=None, cacheFile=None,
path: str = "daisy", path: str = "daisy",
@ -42,27 +43,31 @@ class Cache:
isCatch: bool isCatch: bool
Whether this cache is for catchs Whether this cache is for catchs
""" """
self.daisyCryptography = daisyCryptography
self.data = {} self.data = {}
self.path = path self.path = path
if not os.path.exists(self.path):
os.makedirs(self.path)
if filepaths != None: if filepaths != None:
for fp in filepaths: for fp in filepaths:
fp = path + "/" + fp fp = path + "/" + fp
if os.path.isfile(fp): if os.path.isfile(fp):
self.data[fp] = Daisy(fp) self.data[fp] = Daisy(fp, daisyCryptography)
elif cacheFile != None: elif cacheFile != None:
with open(cacheFile, "r") as f: with open(cacheFile, "r") as f:
for fp in f.read().split("\n"): for fp in f.read().split("\n"):
self.data[fp] = Daisy(fp) self.data[fp] = Daisy(fp, daisyCryptography)
elif walk: elif walk:
for root, dirs, files in os.walk(self.path): for root, dirs, files in os.walk(self.path):
for p in dirs + files: for p in dirs + files:
if not (".json" in p): if not (".json" in p):
if not (".md" in p): if not (".md" in p):
tpath = root + "/" + p tpath = root + "/" + p
self.data[tpath] = Daisy(tpath) self.data[tpath] = Daisy(tpath, daisyCryptography)
def create(self, path: str, data: dict): def create(self, path: str, data: dict, remote=False):
""" """
Create new record Create new record
@ -74,12 +79,16 @@ class Cache:
data: dict data: dict
Data to populate record with Data to populate record with
""" """
with open(self.path + "/" + path, "wb") as f: if remote == False:
f.write(msgpack.dumps(data)) with open(self.path + "/" + path, "wb") as f:
# logging.log(10, "Done creating record") f.write(msgpack.dumps(data))
self.data[path] = Daisy(self.path + "/" + path) # logging.log(10, "Done creating record")
# logging.log(10, "Done loading to Daisy") self.data[path] = Daisy(self.path + "/" + path, self.daisyCryptography)
return self.data[path] # logging.log(10, "Done loading to Daisy")
return self.data[path]
else:
self.data[path] = Ref(path, remote)
return self.data[path]
def get(self, path: str): def get(self, path: str):
""" """
@ -92,7 +101,7 @@ class Cache:
return self.data[path] return self.data[path]
else: else:
if os.path.exists(self.path + "/" + path): if os.path.exists(self.path + "/" + path):
self.data[path] = Daisy(self.path + "/" + path) self.data[path] = Daisy(self.path + "/" + path, self.daisyCryptography)
return self.data[path] return self.data[path]
else: else:
# logging.log(10, "File does not exist") # logging.log(10, "File does not exist")


@ -1,7 +1,9 @@
from Daisy.Cache import Cache from Daisy.Cache import Cache
from Daisy.Ref import Ref
import os import os
import random import random
import uuid
class Catch(Cache): class Catch(Cache):
@ -16,13 +18,23 @@ class Catch(Cache):
catches = {} catches = {}
def __init__( def __init__(
self, path: str = "catch", filepaths=None, catchFile=None, walk: bool = False self,
daisyCryptography,
path: str = "catch",
filepaths=None,
catchFile=None,
walk: bool = False,
): ):
""" """
Basically the same initialization parameters as Catch Basically the same initialization parameters as Catch
""" """
super().__init__( super().__init__(
filepaths=filepaths, cacheFile=catchFile, path=path, walk=walk, isCatch=True daisyCryptography,
filepaths=filepaths,
cacheFile=catchFile,
path=path,
walk=walk,
isCatch=True,
) )
# TODO: Fins # TODO: Fins
@ -34,7 +46,7 @@ class Catch(Cache):
return super().get(path) return super().get(path)
# TODO: Rename # TODO: Rename
def get(self, head: str, tail: str, fins=None): def get(self, head: str, body: str, fins=None):
""" """
Get catch by pieces Get catch by pieces
@ -49,21 +61,41 @@ class Catch(Cache):
fins fins
List of (maximum 8 characters) strings at the end of the catch oe None if none List of (maximum 8 characters) strings at the end of the catch oe None if none
""" """
r = self.search({"head": head, "tail": tail}) r = ""
return r[0][1]["html"] if fins != None and fins != "":
r = self.search({"head": head, "body": body, "fins": fins})
else:
r = self.search({"head": head, "body": body})
if len(r) < 1:
return False
else:
return r[0][1]["html"]
def addc(self, peer, node, seperator, head, tail, data, fins=None): def addc(self, peer, node, seperator, head, body, data, fins=None, remote=False):
tnpath = "catch/" + node tnpath = f"catch/{node}"
if os.path.exists(tnpath) != True: if os.path.exists(self.path + "/" + tnpath) != True:
os.makedirs(tnpath) os.makedirs(self.path + "/" + tnpath)
tppath = tnpath + "/" + peer tppath = tnpath + "/" + peer
if os.path.exists(tppath) != True: if os.path.exists(self.path + "/" + tppath) != True:
os.makedirs(tppath) os.makedirs(self.path + "/" + tppath)
sid = str(random.randrange(0, 999999)).zfill(6) sid = str(random.randrange(0, 999999)).zfill(6)
data["seperator"] = seperator data["seperator"] = seperator
data["head"] = head data["head"] = head
data["tail"] = tail data["body"] = body
if fins != None: if fins != None:
data["fins"] = fins data["fins"] = fins
res = self.create("{0}/{1}/{2}".format(node, peer, sid), data) res = self.create("{0}/{1}".format(tppath, sid), data, remote=remote)
return [sid, res] return [sid, res]
def genIndex(self, onodeID):
dirList = []
for k, v in self.data.items():
curCatch = {"remoteNode": onodeID}
if type(v.msg) != str:
curCatch = curCatch | v.msg
del curCatch["html"]
dirList.append(curCatch)
return dirList
def mergeIndex(self, remoteIndex):
self.remoteCatchesMap += remoteIndex

src/Daisy/Credential.py (new file, 10 lines)

@@ -0,0 +1,10 @@
from Daisy.Daisy import Daisy


class Credential(Daisy):
    def __init__(self, nodeNickname, credentialName, extension, daisyCryptography):
        fname = "data/{0}/{1}.{2}".format(nodeNickname, credentialName, extension)
        super().__init__(
            fname,
            daisyCryptography,
        )


@@ -0,0 +1,46 @@
from Crypto.Cipher import AES

import traceback
import logging

logger = logging.getLogger("__main__." + __name__)


class SteelPetal:
    def __init__(self, key, nonce=None, testData=None):
        try:
            if nonce == None:
                self.cipher = AES.new(self.pad(key).encode("utf-8"), AES.MODE_GCM)
                self.nonce = self.cipher.nonce
            else:
                self.cipher = AES.new(
                    self.pad(key).encode("utf-8"), AES.MODE_GCM, nonce=nonce
                )
                self.nonce = nonce
            if testData != None:
                try:
                    self.cipher.decrypt(testData)
                except:
                    logger.log(30, traceback.format_exc())
                    return False
        except:
            logger.log(30, traceback.format_exc())

    def pad(self, key):
        BS = AES.block_size
        key = key + (BS - len(key) % BS) * chr(BS - len(key) % BS)
        return key

    def encrypt(self, data):
        try:
            return self.cipher.encrypt_and_digest(data)
        except:
            logger.log(20, traceback.format_exec())
            return False

    def decrypt(self, data):
        try:
            return self.cipher.decrypt(data)
        except:
            logger.log(20, traceback.format_exec())
            return False
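A round-trip sketch of the helper above, assuming SteelPetal is importable; the key and plaintext are made up for illustration.

```python
sender = SteelPetal("correct horse battery staple")
ciphertext, tag = sender.encrypt(b"hello piermesh")

# The receiver needs the same key and the sender's nonce to decrypt
receiver = SteelPetal("correct horse battery staple", nonce=sender.nonce)
assert receiver.decrypt(ciphertext) == b"hello piermesh"
```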


@ -1,12 +1,14 @@
import os import os
import json import json
import msgpack import msgpack
import logging
# TODO: delete # TODO: delete
# TODO: propagate json changes to msgpack automatically # TODO: propagate json changes to msgpack automatically
# TODO: propagate msgpack changes to cache automatically # TODO: propagate msgpack changes to cache automatically
# TODO: Indexing # TODO: Indexing
logger = logging.getLogger("__main__." + __name__)
def _json_to_msg(path: str): def _json_to_msg(path: str):
""" """
@ -40,12 +42,15 @@ class Daisy:
In memory representation In memory representation
""" """
# TODO: Strong encrypt
def __init__( def __init__(
self, self,
filepath: str, filepath: str,
daisyCryptography,
templates: dict = {}, templates: dict = {},
template: bool = False, template: bool = False,
prefillDict: bool = False, prefillDict: bool = False,
remote=False,
): ):
""" """
Parameters Parameters
@ -62,27 +67,36 @@ class Daisy:
prefillDict: bool prefillDict: bool
Whether to fill the record with a template Whether to fill the record with a template
""" """
self.remote = False
self.filepath = filepath self.filepath = filepath
if os.path.exists(filepath) != True: if remote != False:
with open(filepath, "wb") as f: self.remote = True
if template != False: self.remoteNodeID = remote
if template in templates.keys(): else:
t = templates[template].get() if os.path.exists(filepath) != True:
with open(filepath, "wb") as f:
if template != False:
if template in templates.keys():
t = templates[template].get()
if prefillDict != False:
for k in prefillDict.keys():
t[k] = prefillDict[k]
f.write(msgpack.dumps(t))
self.msg = t
else:
logger.log(20, "No such template as: " + template)
else:
t = {}
if prefillDict != False: if prefillDict != False:
for k in prefillDict.keys(): for k in prefillDict.keys():
t[k] = prefillDict[k] t[k] = prefillDict[k]
f.write(msgpack.dumps(t)) f.write(msgpack.dumps(t))
self.msg = t self.msg = t
else: elif os.path.isdir(filepath):
print("No such template as: " + template) self.msg = "directory"
else: else:
f.write(msgpack.dumps({})) with open(filepath, "rb") as f:
self.msg = {} self.msg = msgpack.loads(f.read())
elif os.path.isdir(filepath):
self.msg = "directory"
else:
with open(filepath, "rb") as f:
self.msg = msgpack.loads(f.read())
# Use override for updating # Use override for updating
@ -164,26 +178,18 @@ class Daisy:
else: else:
return None return None
def json_to_msg(self, path: str):
"""
Convert json at the path plus .json to a msgpack binary
def loadTemplates(templatePath: str = "templates"): Parameters
"""Load templates for prefilling records ----------
path: str
Parameters Path to json minus the extension
---------- """
templatePath: str rpath = path + ".json"
Path to templates res = b""
""" with open(rpath) as f:
templates = {} res = msgpack.dumps(json.load(f))
for p in os.listdir(templatePath): with open(path, "wb") as f:
p = templatePath + "/" + p f.write(res)
if os.path.isdir(p):
for ip in os.listdir(p):
ip = p + "/" + ip
if os.path.isdir(ip):
print("Too deep, skipping: " + ip)
else:
templates[ip] = Daisy(ip)
else:
templates[p] = Daisy(p)
self.templates = templates
return templates

src/Daisy/Index.py (new file, 57 lines)

@@ -0,0 +1,57 @@
from Daisy.Daisy import Daisy


class Index(Daisy):
    def __init__(
        self,
        nodeNickname,
        daisyCryptography,
        prefill=[],
        indexedFields=[],
        autoIndex=True,
    ):
        # TODO: Load from disk
        if autoIndex:
            if prefill != []:
                if indexedFields == []:
                    for i in prefill:
                        # TODO: Value type annotation
                        # TODO: Value weighting
                        for k, v in i.items():
                            indexedFields.append(k)
                    indexedFields = list(set(indexedFields))
        super().__init__(
            nodeNickname + ".index",
            daisyCryptography,
            prefillDict={"_index": prefill, "_fields": indexedFields},
        )

    def addEntry(self, entry):
        index = self.msg["_index"]
        index.append(entry)
        self.write(override={"_index": index})

    def search(self, keydict: dict, strict: bool = True):
        """
        Search cache for record for records with values

        keydict: dict
            Values to search for

        strict: bool
            Whether to require values match
        """
        results = []
        for ob in self.msg["_index"]:
            if strict and type(ob) != str:
                addcheck = False
                for k, v in keydict.items():
                    if k in ob.keys():
                        if v in ob[k]:
                            addcheck = True
                        else:
                            addcheck = False
                            break
                if addcheck:
                    results.append(ob)
        return results
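Roughly how the index is meant to be used, going by the code above; the field values and the daisyCryptography handle are placeholders.

```python
idx = Index("node00", daisyCryptography, prefill=[{"head": "fish", "body": "main"}])
idx.addEntry({"head": "bird", "body": "docs"})

idx.search({"head": "fish"})   # -> [{'head': 'fish', 'body': 'main'}]
```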

src/Daisy/Ref.py (new file, 6 lines)

@@ -0,0 +1,6 @@
from Daisy.Daisy import Daisy


class Ref(Daisy):
    def __init__(self, path, remoteNodeID):
        super().__init__(path, remote=remoteNodeID)


@@ -1,7 +1,12 @@
 from Daisy.Daisy import Daisy
 import os
+import logging
+import traceback
+
+logger = logging.getLogger("__main__." + __name__)
+
+# TODO: Higher priority erros

 class Store(Daisy):
     """

@@ -10,24 +15,51 @@
     `🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Daisy/Store.py>`__
     """

-    def __init__(self, store: str, path: str, nodeNickname: str):
+    def __init__(self, store: str, path: str, nodeNickname: str, daisyCryptography):
         fpath = "daisy/{0}/{1}".format(path, nodeNickname)
         cpath = "{0}/{1}/{2}".format(path, nodeNickname, store)
         if not os.path.exists(fpath):
             os.mkdir(fpath)
-        super().__init__("daisy/" + cpath)
+        super().__init__("daisy/" + cpath, daisyCryptography)
+        self.ephemeral = {}

-    def update(self, entry: str, data, recur: bool = True):
-        if recur:
-            for key in data.keys():
-                self.msg[entry][key] = data[key]
-        else:
-            self.msg[entry] = data
-        self.write()
+    def createEmpty(self, key):
+        self.msg[key] = {}

-    def getRecord(self, key: str):
-        if key in self.get().keys():
-            return self.get()[key]
-        else:
-            self.cLog(20, "Record does not exist")
-            return False
+    # TODO: Update usages of update where necessary to keep out of mem
+    def update(self, entry: str, data, recur: bool = True, write=True):
+        if write:
+            if recur:
+                if entry not in self.msg.keys():
+                    self.createEmpty(entry)
+                for key in data.keys():
+                    self.msg[entry][key] = data[key]
+            else:
+                self.msg[entry] = data
+            self.write()
+        else:
+            if recur:
+                if entry not in self.ephemeral.keys():
+                    self.ephemeral[entry] = {}
+                for key in data.keys():
+                    self.ephemeral[entry][key] = data[key]
+            else:
+                self.ephemeral[entry] = data

+    def getRecord(self, key: str, ephemeral=False):
+        logger.log(30, key)
+        try:
+            if ephemeral:
+                if key in self.ephemeral.keys():
+                    return self.ephemeral[key]
+                else:
+                    logger.log(20, "Record does not exist")
+                    return False
+            else:
+                if key in self.get().keys():
+                    return self.get()[key]
+                else:
+                    logger.log(20, "Record does not exist")
+                    return False
+        except Exception:
+            logger.log(30, traceback.format_exc())
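The split between written and ephemeral records above works roughly as sketched below; node IDs and key material are placeholders.

```python
keystore = Store("key", "cryptography", "node00", daisyCryptography)

# Persisted: merged into the on-disk record and written out
keystore.update("000001", {"publicKey": public_key_pem})

# Ephemeral: kept only in keystore.ephemeral, never written to disk
keystore.update("000001", {"sessionKey": session_key}, write=False)

keystore.getRecord("000001")                  # persisted fields
keystore.getRecord("000001", ephemeral=True)  # in-memory fields
```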


@ -39,29 +39,39 @@ class Header(Packet):
packetCount: int, packetCount: int,
sender: int, sender: int,
senderDisplayName: int, senderDisplayName: int,
sourceNode: int,
recipient: int, recipient: int,
recipientNode: int, recipientNode: int,
subpacket: bool = False, subpacket: bool = False,
wantFullResponse: bool = False, wantFullResponse: bool = False,
packetsClass: int = 0, packetsClass: int = 0,
pAction: int = -1, pAction: int = -1,
target=True,
): ):
super().__init__( super().__init__(
"", packetsID=packetsID, packetCount=packetCount, packetsClass=packetsClass b"", packetsID=packetsID, packetCount=packetCount, packetsClass=packetsClass
) )
self.target = target
self.sender = sender self.sender = sender
self.senderDisplayName = senderDisplayName self.senderDisplayName = senderDisplayName
self.recipient = recipient if target:
self.recipientNode = recipientNode self.recipient = recipient
self.subpacket = subpacket self.recipientNode = recipientNode
else:
self.recipient = -1
self.recipientNode = -1
# TODO: Populating with submessage ids
self.submessages = []
self.wantFullResponse = wantFullResponse self.wantFullResponse = wantFullResponse
self.pAction = pAction self.pAction = pAction
self.sourceNode = sourceNode
self.packetCount = packetCount
def usePreset(self, path: str): def usePreset(self, path: str, daisyCryptography):
""" """
Add preset fields to the packet Add preset fields to the packet
""" """
preset = Daisy(path) preset = Daisy(path, daisyCryptography)
for key in preset.get().keys(): for key in preset.get().keys():
self.msg[key] = preset.get()[key] self.msg[key] = preset.get()[key]
@ -72,11 +82,13 @@ class Header(Packet):
res = msgpack.loads(super().dump()) res = msgpack.loads(super().dump())
res["sender"] = self.sender res["sender"] = self.sender
res["senderDisplayName"] = self.senderDisplayName res["senderDisplayName"] = self.senderDisplayName
res["sourceNode"] = self.sourceNode
res["recipient"] = self.recipient res["recipient"] = self.recipient
res["recipientNode"] = self.recipientNode res["recipientNode"] = self.recipientNode
res["subpacket"] = self.subpacket res["submessages"] = self.submessages
res["wantFullResponse"] = self.wantFullResponse res["wantFullResponse"] = self.wantFullResponse
res["packetsClass"] = self.packetsClass res["packetsClass"] = self.packetsClass
res["pAction"] = self.pAction res["pAction"] = self.pAction
res["packetCount"] = self.packetCount
return msgpack.dumps(res) return msgpack.dumps(res)


@ -4,9 +4,15 @@ import lzma
import msgpack import msgpack
import random import random
import math import math
import logging
# DO NOT CHANGE DATA SIZE UNLESS YOU KNOW WHAT YOURE DOING # DO NOT CHANGE DATA SIZE UNLESS YOU KNOW WHAT YOURE DOING
logger = logging.getLogger("__main__." + __name__)
def dict2bytes(cdict: dict):
return lzma.compress(msgpack.dumps(cdict))
class Message: class Message:
""" """
@ -25,13 +31,20 @@ class Message:
bytesObject: bytes, bytesObject: bytes,
sender: int, sender: int,
senderDisplayName: int, senderDisplayName: int,
sourceNode,
recipient: int, recipient: int,
recipientNode: int, recipientNode: int,
cryptographyInfo, cryptographyInfo,
packetsClass,
pAction,
dataSize: int = 128, dataSize: int = 128,
wantFullResponse: bool = False, wantFullResponse: bool = False,
packetsClass: int = 0, target=True,
subMessage=False,
primaryMessage=None,
pskEncrypt=False
): ):
# TODO: PSK for usage prior to credentials
""" """
Parameters Parameters
---------- ----------
@ -59,6 +72,11 @@ class Message:
packetsClass: int packetsClass: int
Which protocol the packets are using Which protocol the packets are using
""" """
self.recipientNode = recipientNode
self.target = target
self.subMessage = subMessage
if subMessage:
self.primaryMessage = primaryMessage
if isinstance(bytesObject, list): if isinstance(bytesObject, list):
packets = [h.Header(bytesObject[0])] packets = [h.Header(bytesObject[0])]
for packet in bytesObject: for packet in bytesObject:
@ -75,10 +93,18 @@ class Message:
# Data passed in by peers should already have been e2ee encrypted by SubtleCrypto # Data passed in by peers should already have been e2ee encrypted by SubtleCrypto
# Transport encryption # Transport encryption
# bytesObject = lzma.compress(bytesObject, str(recipientNode).zfill(6), isDict=False) # bytesObject = lzma.compress(bytesObject, str(recipientNode).zfill(6), isDict=False)
bytesObject = cryptographyInfo.encrypt(bytesObject, self.no) if subMessage == False:
bytesObject, nonce, tag = cryptographyInfo.encrypt(
bytesObject, str(recipientNode).zfill(6), isDict=False, pskEncrypt=pskEncrypt
)
logger.log(10, bytesObject)
self.nonce = nonce
self.tag = tag
packets = [] packets = []
self.packetsID = random.randrange(0, 999999) self.packetsID = random.randrange(0, 999999)
pnum = 1 pnum = 1
# if subMessage:
dataSize = 80
blen = math.ceil(len(bytesObject) / dataSize) blen = math.ceil(len(bytesObject) / dataSize)
tb = b"" tb = b""
for it in range(blen): for it in range(blen):
@ -86,30 +112,52 @@ class Message:
b = bytesObject[it * dataSize :] b = bytesObject[it * dataSize :]
else: else:
b = bytesObject[it * dataSize : (it * dataSize + dataSize)] b = bytesObject[it * dataSize : (it * dataSize + dataSize)]
packets.append( if subMessage:
p.Packet(b, self.packetsID, pnum, packetsClass=packetsClass) packets.append(
) p.Packet(
b,
self.packetsID,
pnum,
packetsClass=packetsClass,
primaryMessage=primaryMessage,
)
)
else:
packets.append(
p.Packet(b, self.packetsID, pnum, packetsClass=packetsClass)
)
pnum += 1 pnum += 1
tb += b tb += b
packets.insert( if subMessage:
0, pass
h.Header( else:
self.packetsID, packets.insert(
pnum, 0,
sender, h.Header(
senderDisplayName, self.packetsID,
recipient, pnum,
recipientNode, sender,
wantFullResponse=wantFullResponse, senderDisplayName,
packetsClass=packetsClass, sourceNode,
), recipient,
) recipientNode,
wantFullResponse=wantFullResponse,
packetsClass=packetsClass,
pAction=pAction,
target=target,
),
)
self.fullPackets = [p for p in packets]
if subMessage:
pnum -= 1
for it in range(pnum): for it in range(pnum):
packet = msgpack.loads(packets[it].dump()) packet = msgpack.loads(packets[it].dump())
packet["packetCount"] = pnum packet["packetCount"] = pnum
packets[it] = msgpack.dumps(packet) packets[it] = msgpack.dumps(packet)
self.packets = packets self.packets = packets
def get(self) -> list[p.Packet]: def get(self) -> list[p.Packet]:
@ -118,12 +166,33 @@ class Message:
""" """
return self.packets return self.packets
def reassemble(self, completedMessage: dict): def reassemble(self, completedMessage: dict, cryptographyInfo, subMessage=False, yctx=None, packetCount=None):
""" """
Reassemble packets from a completed message in `Sponge.base` Reassemble packets from a completed message in `Sponge.base`
""" """
data = b"" data = b""
for it in range(1, int(completedMessage["packetCount"])): sourceNode = None
data += completedMessage["data"][completedMessage["dataOrder"].index(it)] # TODO: Fix reassembly for primary
res = msgpack.loads(lzma.decompress(data)) if subMessage:
return res sourceNode = yctx["sourceNode"]["val"]
for it in range(1, packetCount+1):
data += completedMessage["data"][completedMessage["dataOrder"].index(it)]
data = msgpack.loads(lzma.decompress(data))
logger.log(10, data)
logger.log(10, completedMessage["data"])
logger.log(10, completedMessage["dataOrder"])
else:
# TODO: Cryptography setup
packetCount = int(completedMessage.yctx["packetCount"]["val"])
sourceNode = completedMessage.yctx["sourceNode"]["val"]
logger.log(10, completedMessage.data)
for it in range(1, packetCount):
if it in completedMessage.dataOrder:
data += completedMessage.data[completedMessage.dataOrder.index(it)]
logger.log(10, "pre decrypt")
logger.log(10, data)
data = cryptographyInfo.decrypt(
data, sourceNode, completedMessage.nonce, completedMessage.tag
)
# data = msgpack.loads(lzma.decompress(data))
return data


@@ -0,0 +1 @@
# WARNING: DO NOT TRY TO POKE A BARNACLE


@ -0,0 +1,27 @@
from Packets.Message import Message
import Packets.Message
class Bubble(Message):
def __init__(
self,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
data,
):
bytesOb = Packets.Message.dict2bytes({"data": data, "recipient": recipient, "target": "bubble"})
super().__init__(
bytesOb,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
2,
0,
)


@ -0,0 +1,29 @@
from Packets.Message import Message
import Packets.Message
class IndexSync(Message):
def __init__(
self,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
index,
target=False
):
bytesOb = Packets.Message.dict2bytes({"index": index})
super().__init__(
bytesOb,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
1,
2,
target=target
)


@ -0,0 +1,31 @@
from Packets.Message import Message
import Packets.Message
class CatchRequest(Message):
def __init__(
self,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
head,
body,
fins,
pskEncrypt=False
):
bytesOb = Packets.Message.dict2bytes({"head": head, "body": body, "fins": fins, "recipient": sender, "recipientNode": sourceNode})
super().__init__(
bytesOb,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
1,
0,
pskEncrypt=pskEncrypt
)


@ -0,0 +1,29 @@
from Packets.Message import Message
import Packets.Message
class CatchResponse(Message):
def __init__(
self,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
html,
pskEncrypt=False
):
bytesOb = Packets.Message.dict2bytes({"html": html, "recipient": recipient, "target": "catch"})
super().__init__(
bytesOb,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
1,
1,
pskEncrypt=pskEncrypt
)


@ -0,0 +1,46 @@
from Packets.Message import Message
import Packets.Message
# TODO: Send with psk encryption
class Handshake(Message):
def __init__(
self, sender, senderID, recipient, recipientNode, cryptographyInfo, onodeID, sourceNode
):
publicKey = None
ephemeralKey = None
record = cryptographyInfo.getRecord("key", "self")
if record != False:
if "publicKey" in record.keys():
publicKey = record["publicKey"]
else:
raise Exception("Public key missing for node")
if "ourEphemeralKey" in record.keys():
ephemeralKey = record["ourEphemeralKey"]
else:
cryptographyInfo.genOurEphemeralKey(onodeID)
record = cryptographyInfo.getRecord("key", onodeID, ephemeral=True)
ephemeralKey = record["ourEphemeralKey"].export_key(
format="PEM",
prot_params={"iteration_count": 131072}
)
if "staticKey" not in record.keys():
cryptographyInfo.genStaticKey(onodeID)
else:
raise Exception("Node does not exist")
bytesOb = Packets.Message.dict2bytes(
{"ephemeralKey": ephemeralKey}
)
super().__init__(
bytesOb,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
3,
0,
pskEncrypt=True
)


@ -0,0 +1,29 @@
from Packets.Message import Message
import Packets.Message
class HopperRequest(Message):
def __init__(
self,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
url,
params,
method,
cryptographyInfo,
):
bytesOb = Packets.Message.dict2bytes({"url": url, "parameters": params, "method": method, "recipient": sender, "recipientNode": sourceNode})
super().__init__(
bytesOb,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
5,
0,
)


@ -0,0 +1,23 @@
from Packets.Message import Message
import Packets.Message
class HopperResponse(Message):
def __init__(
self, sender, senderID, sourceNode, recipient, recipientNode, response, cryptographyInfo
):
bytesOb = Packets.Message.dict2bytes({"res": response, "recipient": recipient, "target": "hopper"})
# bytesOb = cryptographyInfo.encrypt(bytesOb, recipientNode)
super().__init__(
bytesOb,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
5,
1
)


@ -0,0 +1,36 @@
from Packets.Message import Message
import Packets.Message
import logging
logger = logging.getLogger("__main__." + __name__)
# TODO: Add public key
class AnnounceMessage(Message):
def __init__(
self,
sender,
senderID,
sourceNode,
cryptographyInfo,
mapping,
):
mapping["publicKey"] = cryptographyInfo.getRecord("key", "self")["publicKey"]
recipient = -1
recipientNode = -1
bytesOb = Packets.Message.dict2bytes(mapping)
logger.log(10, "Mapping bytes")
logger.log(10, bytesOb)
super().__init__(
bytesOb,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
0,
0,
target=False,
)


@ -0,0 +1,20 @@
# Template for a Protocol message
from Packets.Message import Message
import Packets.Message
class Template(Message):
def __init__(
self, sender, senderID, sourceNode, recipient, recipientNode, cryptographyInfo
):
super().__init__(
bytesOb,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
0,
0,
)



@ -32,6 +32,7 @@ class Packet:
packetNumber=False, packetNumber=False,
packetCount: int = 1, packetCount: int = 1,
packetsClass: int = -1, packetsClass: int = -1,
primaryMessage=None,
): ):
if packetsID == False: if packetsID == False:
self.packetsID, self.packetNumber, self.data, self.packetsClass = ( self.packetsID, self.packetNumber, self.data, self.packetsClass = (
@ -42,14 +43,7 @@ class Packet:
self.packetNumber = packetNumber self.packetNumber = packetNumber
self.data = data self.data = data
self.packetsClass = packetsClass self.packetsClass = packetsClass
if packetsClass != -1: self.primaryMessage = primaryMessage
pass
"""template = Daisy("daisy/packet_templates/template.lookup").get()[
str(packetsClass).zfill(2)
]
edata = Daisy("daisy/packet_templates/" + template)
for key in edata.get().keys():
self.data[key] = edata.get()[key]"""
def parsePayload(data): def parsePayload(data):
""" """
@ -75,6 +69,8 @@ class Packet:
} }
if res["data"] == "": if res["data"] == "":
res.pop("data") res.pop("data")
if self.primaryMessage != None:
res["primaryMessage"] = self.primaryMessage
ores = msgpack.dumps(res) ores = msgpack.dumps(res)
# logging.log(20, "Packet size: " + str(sys.getsizeof(ores))) # logging.log(20, "Packet size: " + str(sys.getsizeof(ores)))
return ores return ores


@ -1,4 +1,39 @@
class SubMessage: from Packets.Message import Message
""" import Packets.Message
TODO import logging
"""
logger = logging.getLogger("__main__." + __name__)
class SubMessage(Message):
def __init__(
self,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
protocolID,
pAction,
data,
target=True,
primaryMessage=None
):
bytesOb = Packets.Message.dict2bytes(data)
logger.log(10, "Submessage bytes")
logger.log(10, bytesOb)
super().__init__(
bytesOb,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
protocolID,
pAction,
target=target,
subMessage=True,
primaryMessage=primaryMessage
)

src/Services/Action.py (new file, 33 lines)

@@ -0,0 +1,33 @@
class Action:
    def __init__(
        self,
        action,
        data,
        sender=None,
        senderID=None,
        sourceNode=None,
        recipient=None,
        recipientNode=None,
    ):
        self.action = action
        self.data = data
        if sender != None:
            self.data["sender"] = sender
        if senderID != None:
            self.data["senderID"] = senderID
        if sourceNode != None:
            self.data["sourceNode"] = sourceNode
        if recipient != None:
            self.data["recipient"] = recipient
        if recipientNode != None:
            self.data["recipientNode"] = recipientNode

    def getAction(self):
        return self.action

    def getData(self):
        return self.data

src/Services/ToDo.py (new file, 27 lines)

@@ -0,0 +1,27 @@
from Services.Action import Action


class ToDo:
    def __init__(self):
        self.actions = []

    # otf here meaning on the fly
    # This prefix should be applied to optional
    # fields that allow soft defining components
    # by taking in their initialization parameters
    # and instantiating an object with those
    # Q: Should be break camel case here for cleaner
    # reading? ex. otf_action looks better then otfAction
    def add(self, action, otfAction=None, otfData=None):
        if otfAction != None:
            if otfData != None:
                action = Action(otfAction, otfData)
        self.actions.append(action)

    def next(self):
        if len(self.actions) < 1:
            return (False, None)
        else:
            action = self.actions.pop()
            return (action.getAction(), action.getData())
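An illustrative use of the otf ("on the fly") path described in the comments above; the action name and fields are invented for the example.

```python
todo = ToDo()

# Build the Action on the fly from its parameters instead of passing one in
todo.add(None, otfAction="sendCatch", otfData={"head": "fish", "body": "main"})

action, data = todo.next()   # ("sendCatch", {...}) once, then (False, None)
```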


@@ -39,7 +39,20 @@ class Network:
         self.mimport()
         self.lookup = {}

-    def addLookup(self, onodeID: str, mnodeID: str):
+    async def addLookup(self, onodeID: str, mnodeID: str):
+        """
+        Adds node to lookup
+
+        Parameters
+        ----------
+        onodeID: str
+            Internal nodeID
+
+        mnodeID: str
+            MeshTastic nodeID
+        """
+        self.lookup[onodeID] = mnodeID
+
+    def syncaddLookup(self, onodeID: str, mnodeID: str):
         """
         Adds node to lookup

@ -0,0 +1,5 @@
---
title: Node
---
flowchart LR
id


@ -8,6 +8,21 @@
--palette-two: #A6B08E; --palette-two: #A6B08E;
--palette-three: #879B77; --palette-three: #879B77;
--palette-four: #61805B; --palette-four: #61805B;
--grid-columns: 8;
--grid-rows: 8;
}
#controls {
display: grid;
grid-template-columns: repeat(var(--grid-columns), auto);
gap: 5%;
}
#render {
display: grid;
grid-template-columns: repeat(var(--grid-columns), 20px);
grid-template-rows: repeat(var(--grid-rows), 20px);
border: 1px solid black;
} }
html { html {
@ -22,6 +37,10 @@ html {
background-color: var(--palette-two); background-color: var(--palette-two);
} }
.plankInner {
display: none;
}
ul { ul {
padding: 0; padding: 0;
list-style-type: none !important; list-style-type: none !important;
@ -29,9 +48,12 @@ ul {
li { li {
padding-top: 5px; padding-top: 5px;
text-decoration: none;
list-style-type: none;
} }
input[type=text], input[type=text],
input[type=number] { input[type=number] {
min-width: 150px; min-width: 150px;
max-width: 150px;
} }


@@ -0,0 +1,5 @@
---
title: Node
---
flowchart LR
id

Binary file not shown (new image, 1.5 KiB).

Binary file not shown (new image, 707 B).

Binary file not shown (new image, 4.7 KiB).

src/Splash/res/js/custom.js (new file, 164 lines)

@ -0,0 +1,164 @@
function hopID() {
document.getElementById("hopperPeerID").value =
document.getElementById("peerID").value;
}
function toggle(id) {
var cstyle = document.getElementById(id).style.display;
var nstyle = "block";
if (cstyle == "block") {
nstyle = "none";
}
document.getElementById(id).style.display = nstyle;
}
function addFin() {
var pfin = document.querySelector("#fins").lastElementChild;
var pclone = pfin.cloneNode();
pclone.id = "cfin";
var cid = pclone.firstElementChild.id;
cid = cid.substring(3);
cid = ("" + parseInt(cid) + 1).padStart(2, "0");
cid = fin + cid;
pclone.firstElementChild.id = cid;
pclone.firstElementChild.id = cid;
pfin.insertAdjacentElement("afterend", pclone);
}
function getFins() {
var fins = document.querySelector("#fins").children;
var finsStr = "";
for (var i = 0; i < fins.length; i++) {
var fin = fins[i];
finsStr = finsStr + fin.firstElementChild.value;
if (i != fins.length - 1) {
finsStr = finsStr + 1;
}
}
document.getElementById("finsStr").value = finsStr;
}
function getCatch() {
document.getElementById("catchPeerID").value =
document.getElementById("peerID").value;
getFins();
}
// P2Chat code
function splash(that) {
//alert(parent.id);
//alert(parent.getAttribute("data-coord"));
//alert(that.value);
that.style.backgroundColor = document.querySelector("#color").value;
}
function cGen(that) {
document.getElementById("render").innerHTML = "";
var parent = that.parentElement;
// alert("parent");
var canvasX = Number(document.querySelector("#canvasX").value);
// alert("x");
canvasX = Math.floor(canvasX);
document
.querySelector(":root")
.style.setProperty("--grid-rows", "" + canvasX);
// alert("grid");
var canvasY = Number(document.querySelector("#canvasY").value);
//alert(document.querySelector("#canvasY").value);
canvasY = Math.floor(canvasY);
document
.querySelector(":root")
.style.setProperty("--grid-columns", "" + canvasY);
//alert(canvasY);
var nodeRender = "";
var cloneRender = "";
var nodeControl = "";
var cloneControl = "";
//alert("start loop");
for (let x = 0; x < canvasX; x++) {
for (let y = 0; y < canvasY; y++) {
//alert(" in");
nodeRender = document.getElementById("rendertemplate");
//alert(" past template");
cloneRender = nodeRender.cloneNode(true);
cloneRender.style.display = "grid";
cloneRender.id = "i" + x + "x" + y;
if (y == 0) {
//alert(cloneRender.innerHTML);
}
document.getElementById("render").appendChild(cloneRender);
}
}
}
function setColor(that) {
var color = that.value;
//alert(typeof color);
if (color.includes("#")) {
document.querySelector("#color").value = color;
} else {
document.querySelector("#color").value = "#" + color;
document.querySelector("#picker").value = "#" + color;
}
}
function saveAs(uri, filename) {
var link = document.createElement("a");
if (typeof link.download === "string") {
link.href = uri;
link.download = filename;
//Firefox requires the link to be in the body
document.body.appendChild(link);
//simulate click
link.click();
//remove the link when done
document.body.removeChild(link);
} else {
window.open(uri);
}
}
function save(toFile) {
var canvas = document.createElement("canvas");
var canvasX = Number(document.querySelector("#canvasX").value);
var canvasY = Number(document.querySelector("#canvasY").value);
//alert(canvasX);
//alert(canvasY);
canvas.width = canvasY;
canvas.height = canvasX;
var ctx = canvas.getContext("2d");
var x = 0;
var y = 0;
for (x = 0; x < canvasX; x++) {
for (y = 0; y < canvasY; y++) {
//alert(document.querySelector("#i" + x + "x" + y).style.backgroundColor);
//alert("before fill style");
ctx.fillStyle = document.querySelector(
"#i" + x + "x" + y,
).style.backgroundColor;
//ctx.fillStyle = "#00ff00";
//alert("after fill style");
ctx.fillRect(y, x, 1, 1);
}
}
if (toFile) {
    saveAs(canvas.toDataURL("image/png"), "download.png");
} else {
document.getElementById("p2img").value = canvas.toDataURL("image/png");
}
}
function p2ToBubble() {
save();
var bub = new Object();
bub.img = document.getElementById("p2img").value;
document.getElementById("chat_message").value += JSON.stringify(bub);
}
document.addEventListener("DOMContentLoaded", function(event) {
setColor(document.getElementById("picker"));
cGen(document.getElementById("canvasY"));
});
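
As a cross-check on the fins handling above, here is a minimal Python sketch of the round trip: getFins() joins the fin input values with commas into finsStr, and the server side (Splash/serve.py below) splits that string back apart. The helper names are illustrative, not part of the codebase.

```
# Illustrative helpers only; the real code lives in custom.js and Splash/serve.py
def pack_fins(fins):
    """Mirror of getFins(): join fin input values with "," separators."""
    return ",".join(fins)

def unpack_fins(fins_str):
    """Mirror of the server side, which calls finsStr.split(",")."""
    return [f for f in fins_str.split(",") if f]

assert unpack_fins(pack_fins(["alpha", "beta"])) == ["alpha", "beta"]
```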

View File

@ -1,13 +1,30 @@
from uuid import uuid4
import Components.hopper as hopper
from Packets.Messages.Protocols.catch.Request import CatchRequest
from Packets.Messages.Protocols.catch.IndexSync import IndexSync
from Packets.Messages.Protocols.hopper.Request import HopperRequest
from Packets.Messages.Protocols.bubble.Bubble import Bubble
from microdot import Microdot from microdot import Microdot
from microdot import send_file from microdot import send_file
from microdot.websocket import with_websocket from microdot.websocket import with_websocket, WebSocketError
from microdot import Request from microdot import Request
from microdot.jinja import Template
from microdot.session import Session, with_session
import random import random
import json import json
import time import time
import logging
import traceback
import uuid
import re
import msgpack import msgpack
logger = logging.getLogger("__main__." + __name__)
# Enable 500 kB files in the webui # Enable 500 kB files in the webui
Request.max_content_length = 1024 * 1024 * 0.5 Request.max_content_length = 1024 * 1024 * 0.5
Request.max_body_length = 1024 * 1024 * 0.5 Request.max_body_length = 1024 * 1024 * 0.5
@ -44,17 +61,30 @@ class Server:
Reference to our Catch Cache instance to pull from for serving Catches Reference to our Catch Cache instance to pull from for serving Catches
""" """
def __init__(self, transceiver, catch, onodeID, network, cLog): def __init__(
self.cLog = cLog self,
transceiver,
catch,
onodeID,
network,
cryptographyInfo,
remoteCatchIndex,
cache,
):
self.transceiver = transceiver self.transceiver = transceiver
self.network = network self.network = network
self.network.addLookup(onodeID, self.transceiver.interface.localNode.nodeNum) self.network.syncaddLookup(onodeID, self.transceiver.interface.localNode.nodeNum)
self.nodeID = str(onodeID) self.nodeID = str(onodeID)
self.peerIDs = {} self.peerIDs = {}
self.app = Microdot() self.app = Microdot()
self.session = Session(
self.app, secret_key=str(uuid.uuid4())
)
self.catch = catch self.catch = catch
# self.nmap = {self.nodeID: self.t.interface.localNode.nodeNum} self.cache = cache
# self.cLog(20, "Initialized server") self.cryptographyInfo = cryptographyInfo
self.remoteCatchIndex = remoteCatchIndex
logger.info("Initialized server")
@self.app.route("/res/<path:path>") @self.app.route("/res/<path:path>")
async def static(request, path): async def static(request, path):
@ -64,7 +94,45 @@ class Server:
if ".." in path: if ".." in path:
# directory traversal is not allowed # directory traversal is not allowed
return "Not found", 404 return "Not found", 404
return send_file("webui/build/res/" + path, max_age=86400) return send_file("Splash/build/res/" + path, max_age=86400)
@self.app.route("/")
async def index(request):
"""
Static handler to serve the web ui
"""
try:
return send_file("Splash/build/index/index.html")
except Exception as e:
logger.error(traceback.format_exc())
return "Server error", 500
@self.app.route("/hop/<tmpResourceID>")
async def hop(request, tmpResourceID):
try:
return self.cache.get("tmp/hopper/" + tmpResourceID).get()["html"]
except Exception as e:
logger.error(traceback.format_exc())
return "Server error", 500
@self.app.route("/api/json")
async def api(request):
try:
return {"hello": "world"}
except Exception as e:
logger.error(traceback.format_exc())
return "Server error", 500
@self.app.route("/admin")
@with_session
async def admin(request, session):
try:
return Template("Splash/admin/admin.html").render(psks=await self.getPSKs())
except Exception as e:
logger.error(traceback.format_exc())
return "Server error", 500
@self.app.route("/bubble") @self.app.route("/bubble")
@with_websocket @with_websocket
@ -77,101 +145,250 @@ class Server:
`🔗 HTMX docs <https://htmx.org/docs/>`_ `🔗 HTMX docs <https://htmx.org/docs/>`_
""" """
while True: while True:
r = await ws.receive() try:
message = json.loads(r) r = await ws.receive()
trigger = message["HEADERS"]["HX-Trigger"] message = json.loads(r)
# TODO: Drop old id from cache on regen #logger.debug(json.dumps(message, indent=4))
if trigger == "gpID": trigger = message["HEADERS"]["HX-Trigger"]
peerID = str(random.randrange(0, 1000000)).zfill(6) logger.debug(f"Trigger: {trigger}")
await ws.send( # TODO: Drop old id from cache on regen
""" if trigger == "gpID":
<p id="vpeerID">Peer ID: {0}</p> peerID = str(random.randrange(0, 1000000)).zfill(6)
""".format( await ws.send(
peerID """
<p id="vpeerID">Peer ID: {0}</p>
""".format(
peerID
)
) )
) await ws.send(
await ws.send( """
""" <input id="peerID" type="hidden" value="{0}" >
<input id="peerID" type="hidden" value="{0}" > """.format(
""".format( peerID
peerID )
) )
) await ws.send(
await ws.send( """
""" <p id="vnodeID">Node ID: {0}</p>
<p id="vnodeID">Node ID: {0}</p> """.format(
""".format( self.nodeID
self.nodeID )
) )
) await ws.send(
await ws.send( """ <input id="nodeID" type="hidden" value="{0}" >""".format(
""" <input id="nodeID" type="hidden" value="{0}" >""".format( self.nodeID
self.nodeID )
) )
) await ws.send(
await ws.send( "<input id='gID' type='hidden' value='{0}' hx-swap-oob='true'>".format(
"<input id='gID' type='hidden' value='{0}' hx-swap-oob='true'>".format( peerID
peerID )
) )
) await ws.send(
await ws.send( "<input type='hidden' name='eID' value='{0}' hx-swap-oob='true'>".format(
"<input type='hidden' name='eID' value='{0}' hx-swap-oob='true'>".format( peerID
peerID )
) )
) peer = {"usedLast": round(time.time() * 1000), "ws": ws}
peer = {"usedLast": round(time.time() * 1000), "ws": ws} self.peerIDs[peerID] = peer
self.peerIDs[peerID] = peer elif trigger == "bubble":
elif trigger == "bubble": sender = message["bID"]
sender = message["bID"] data = message["chat_message"]
data = message["chat_message"] # TODO: Setting sender name id
# TODO: Setting sender name id # senderName = message["senderID"]
# senderName = message["senderID"] senderName = 000000
senderName = 000000 recipient = message["recipientID"]
recipient = message["recipientID"] recipientNode = message["recipientNode"]
recipientNode = message["recipientNode"] r = Bubble(
await self.t.addPackets( sender,
msgpack.dumps({"data": data}), senderName,
sender, self.nodeID,
senderName, recipient,
recipient, recipientNode,
int(recipientNode), self.cryptographyInfo,
directID=self.network.doLookup(recipientNode), data,
packetsClass=2, )
) await self.transceiver.sendMessage(r)
elif trigger == "catch": elif trigger == "catch":
res = self.catch.get(message["head"], message["body"]) res = self.catch.get(
await ws.send('<div id="catchDisplay">{0}</div>'.format(res)) message["head"],
# TODO: Catch update packets message["body"],
elif trigger == "catchEdit": fins=message["finsStr"].split(","),
self.catch.addc( )
message["eID"], if res == False:
self.nodeID, await ws.send(
message["sep"], '<div id="catchDisplay">{0}</div>'.format(
message["head"], "Searching PierMesh for Catch please wait...<img src='/res/img/searching.gif'>"
message["body"], )
{"html": message["catchContent"]}, )
)
await ws.send(
"""
<ul id="resultsCatch" hx-swap-oob='true'><li>OK</li></ul>
"""
)
else:
await ws.send(
"""<div id="chat_room" hx-swap-oob="beforeend">hi</div>"""
)
@self.app.route("/") peerID = message["catchPeerID"]
async def index(request): q = {
""" "head": message["head"],
Static handler to serve the web ui "body": message["body"],
""" }
return send_file("webui/build/index/index.html") fins = None
if "fins" in message:
if message["fins"] != "":
q["fins"] = message["fins"]
fins = message["fins"]
# TODO: Handling multiple results
q = self.remoteCatchIndex.search(q)[0]
if q != False:
m = CatchRequest(
peerID,
000000,
self.nodeID,
q["remoteNode"],
q["remoteNode"],
self.cryptographyInfo,
message["head"],
message["body"],
fins
)
await self.transceiver.sendMessage(m)
# TODO: Daisy replication settings
elif trigger == "hopper":
url = message["url"]
logger.debug(url)
isPost = "isPost" in message.keys()
method = "get"
if isPost:
method = "post"
remote = "remote" in message.keys()
remoteNode = message["remoteNode"]
params = ""
try:
params = json.loads(message["params"])
except json.decoder.JSONDecodeError:
# TODO: Proper error code
# TODO: Fix stalling at startup
# TODO: Request missing packets
logger.error("Parameters on hop request were not json, dropping")
# TODO: Redirecting to html content
if remote and (remoteNode != ""):
peerID = message["hopperPeerID"]
await ws.send(
'<div id="lilypad">{0}</div>'.format(
"Requesting hop from remote node...<img src='/res/img/searching.gif'>"
)
)
async def sendToPeer(self, peerID: str, data: str): r = HopperRequest(
peerID,
000000,
self.nodeID,
remoteNode,
remoteNode,
url,
params,
method,
self.cryptographyInfo,
)
await self.transceiver.sendMessage(r)
else:
if isPost:
await ws.send(
'<div id="lilypad">{0}</div>'.format(
hopper.post(url, params)
)
)
else:
res = hopper.get(url, params)
if res["content-type"] == "text/html":
logger.debug("Local hopping done, html content found")
resID = str(uuid4())
self.cache.create(
"tmp/hopper/" + resID, {"html": res}
)
await ws.send(
"<div id='lilypad'><a href='/hop/{0}'></a></div>".format(
resID
)
)
else:
logger.debug("Local hopping done, non html content found")
await ws.send(
'<div id="lilypad">{0}</div>'.format(res)
)
# TODO: Catch update packets
elif trigger == "catchEdit":
if message["eID"] == "":
raise ValueError("Peer ID is blank")
self.catch.addc(
message["eID"],
self.nodeID,
message["sep"],
message["head"],
message["body"],
{"html": message["catchContent"]},
)
await ws.send(
"""
<ul id="resultsCatch" hx-swap-oob='true'><li>OK</li></ul>
"""
)
logger.info(self.catch.genIndex(self.nodeID))
indexUpdate = IndexSync(
message["eID"],
0,
self.nodeID,
"-00001",
"-00001",
self.cryptographyInfo,
self.catch.genIndex(self.nodeID)
)
await self.transceiver.sendMessage(indexUpdate)
elif trigger == "admin":
# TODO
pass
else:
await ws.send(
"""<div id="chat_room" hx-swap-oob="beforeend">hi</div>"""
)
except WebSocketError as e:
pass
# Uncomment below for WebSocket debugging
# logger.debug(traceback.format_exc())
# return "Server error", 500
except Exception as e:
logger.error(traceback.format_exc())
return "Server error", 500
async def getPSKs(self):
psks = [
{"psk": v["PSK"], "nodeID": k}
for k, v in self.cryptographyInfo["msg"].items()
]
return psks
# TODO: Send catch to catch display
async def sendToPeer(self, peerID: str, data: str, target: str):
""" """
Send data to Websocket of peer with peerID Send data to Websocket of peer with peerID
""" """
await self.peerIDs[peerID]["ws"].send( logger.debug(target)
"<ul id='chat_room' hx-swap-oob='afterend'><li>{0}</li></ul>".format(data) if target == "bubble":
) mapper = []
for ob in re.findall('{(.+?)}', data):
cobs = "{" + ob + "}"
cob = json.loads(cobs)
if "img" in cob.keys():
cimg = cob["img"]
mapper.append([cobs, f"<img style='width:30vw;image-rendering:pixelated;' src='{cimg}'>"])
for map in mapper:
data = data.replace(map[0], map[1])
await self.peerIDs[peerID]["ws"].send(
"<ul id='chat_room' hx-swap-oob='afterend'><li>{0}</li></ul>".format(data)
)
elif target == "catch":
logger.debug("In catch")
await self.peerIDs[peerID]["ws"].send(
"<div id='catchDisplay' style='background-color: var(--palette-three);'>{0}</div>".format(data)
)
elif target == "hopper":
await self.peerIDs[peerID]["ws"].send(
"<div id='lilypad' style='background-color: var(--palette-three);'>{0}</div>".format(data)
)
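
To make the bubble-image substitution in sendToPeer easier to follow, here is a standalone sketch of the same idea. The helper name is hypothetical and the try/except guard is an addition; the regex and the <img> tag template come from the code above.

```
import json
import re

def inline_bubble_images(data: str) -> str:
    """Replace embedded {"img": <data URL>} objects with <img> tags,
    mirroring the substitution in Server.sendToPeer."""
    for ob in re.findall(r"{(.+?)}", data):
        cobs = "{" + ob + "}"
        try:
            cob = json.loads(cobs)
        except json.JSONDecodeError:
            continue  # not a JSON object, leave the text alone
        if "img" in cob:
            cimg = cob["img"]
            data = data.replace(
                cobs,
                f"<img style='width:30vw;image-rendering:pixelated;' src='{cimg}'>",
            )
    return data

# Example: a message produced by p2ToBubble() in custom.js
print(inline_bubble_images('hello {"img": "data:image/png;base64,AAAA"}'))
```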

View File

@ -0,0 +1,16 @@
{% extends "shared/base.html" %}
{% block body %}
<form id="admin" hx-ext="ws" ws-connect="/bubble">
<ul id="psks">
{% for psk in psks %}
<li>
Node ID: {{ psk['nodeID'] }}
<br>
PSK:
<input type="number" id="{{ psk['nodeID'] }}" name="{{ psk['nodeID'] }}" value="{{ psk['psk'] }}" max="999999">
</li>
{% endfor %}
</ul>
<button id="pskUpdate" name="pskUpdate">Update PSK</button>
</form>
{% endblock %}

View File

@ -1,22 +1,25 @@
{% extends "shared/base.html" %} {% extends "shared/base.html" %}
{% block body %} {% block body %}
<div>
<img alt="PierMesh logo" height="128px" src="/res/img/logo.png"> <img alt="PierMesh logo" height="128px" src="/res/img/logo.png" style="display: inline-block;">
<br> <h1 style="display: inline-block;">PierMesh</h1>
<br> </div>
{% include "shared/catch.nav.html" %}
<br>
{% include "shared/catch.editor.html" %}
<div hx-history="false"> <div hx-history="false">
</div> </div>
<br> <br>
<div class="plank" hx-ext="ws" ws-connect="/bubble"> <div class="plank" hx-ext="ws" ws-connect="/bubble">
<p id="vpeerID">Peer ID:</p> <p id="vpeerID">Peer ID:</p>
<input id="peerID" type="hidden" > <input id="peerID" type="hidden">
<p id="vnodeID">Node ID:</p> <p id="vnodeID">Node ID:</p>
<input id="peerID" type="hidden" > <input id="nodeID" type="hidden">
<button id="gpID" ws-send>Connect</button> <button id="gpID" ws-send>Connect</button>
</div> </div>
<br> <br>
{% include "shared/hopper.html" %}
<br>
{% include "shared/catch.nav.html" %}
<br>
{% include "shared/catch.editor.html" %}
<br>
{% include "shared/messenger.html" %} {% include "shared/messenger.html" %}
{% endblock %} {% endblock %}

View File

@ -10,6 +10,8 @@
<script src="/res/js/node_modules/htmx.org/dist/htmx.min.js"></script> <script src="/res/js/node_modules/htmx.org/dist/htmx.min.js"></script>
<script src="/res/js/ws.js"> <script src="/res/js/ws.js">
</script> </script>
<script src="/res/js/custom.js">
</script>
</head> </head>
<body> <body>

View File

@ -1,23 +1,23 @@
<div class="plank" hx-ext="ws" ws-connect="/bubble"> {% extends "shared/plank.html" %}
<img src="/res/img/catchdisplay.png"> {% set plankTitle = "catch editor" %}
<br> {% set formID = "catchEdit" %}
Catch publisher<br> {% set icon = "catchdisplay.png" %}
<ul id="resultsCatch"> {% block insert %}
</ul> <ul id="resultsCatch">
<form id="catchEdit" ws-send> </ul>
Head <br> <input type="text" name="head" size="4" maxlength="4"><br> Head <br> <input type="text" name="head" size="4" maxlength="4"><br>
Separator <br> <input type="text" name="sep" size="1" maxlength="1"><br> Separator <br> <input type="text" name="sep" size="1" maxlength="1"><br>
Body <br> <input type="text" name="body" size="16" maxlength="16"><br> Body <br> <input type="text" name="body" size="16" maxlength="16"><br>
Fins<br> Fins<br>
<ul id="fins"> <ul id="fins">
<li class="fin"> <input type="text" size="8" maxlength="8"> </li> <li class="fin"> <input type="text" size="8" maxlength="8"> </li>
<li><button>+</button></li> <li><button>+</button></li>
</ul> </ul>
Content Content
<br> <br>
<textarea style="min-width: 200px;min-height:200px;" name="catchContent"></textarea> <textarea style="min-width: 200px;min-height:200px;" name="catchContent"></textarea>
<br> <br>
<button onclick="document.getElementById('eID').value = document.getElementById('peerID').value">Publish</button> <button onclick="document.getElementById('eID').value = document.getElementById('peerID').value">Publish</button>
<input type="hidden" name="eID" id="eID"> <input type="hidden" name="eID" id="eID">
</form>
</div> {% endblock %}

View File

@ -1,31 +1,31 @@
<div class="plank" hx-ext="ws" ws-connect="/bubble"> {% extends "shared/plank.html" %}
<img src="/res/img/catchdisplay.png"> {% set plankTitle = "catch" %}
<br> {% set formID = plankTitle %}
Catch<br><br> {% block insert %}
<form id="catch" ws-send> <label for="head">Head (max. 4 characters)</label>
<label for="head">Head (max. 4 characters)</label> <br>
<br> <input type="text" id="head" name="head" size="4" maxlength="4">
<input type="text" id="head" name="head" size="4" maxlength="4"> <br>
<br> <label for="sep">Separator</label>
<label for="sep">Separator</label> <br>
<br> <input type="text" id="sep" name="sep" size="1" maxlength="1"><br>
<input type="text" id="sep" name="sep" size="1" maxlength="1"><br> <label for="body">Body (max. 16 characters)</label>
<label for="body">Body (max. 16 characters)</label> <br>
<br> <input type="text" id="body" name="body" size="16" maxlength="16">
<input type="text" id="body" name="body" size="16" maxlength="16"> <ul id="fins">
<ul id="fins"> Fins:
Fins: <li id="pfin" class="fin">
<li class="fin"> <input type="text" id="fin00" name="fin00" size="8" maxlength="8">
<input type="text" size="8" maxlength="8"> </li>
</li> </ul>
<li> <button onclick="addFin();">+ Fin</button>
<button>+</button> <input type="hidden" id="catchPeerID" name="catchPeerID">
</li> <input type="hidden" id="finsStr" name="finsStr">
</ul> <br>
<button>Get</button> <button onclick="getCatch()">Get</button>
</form> <br>
Results: Results:
<br> <br>
{% include "shared/catch.html" %} {% include "shared/catch.html" %}
<br> <br>
</div> {% endblock %}

View File

@ -0,0 +1,30 @@
{% extends "shared/plank.html" %}
{% set plankTitle = "hopper" %}
{% set formID = plankTitle %}
{% block insert %}
<label for="url">URL</label>
<br>
<input type="text" id="url" name="url" maxlength="255">
<br>
<label for="params">Parameters (json)</label>
<br>
<input type="textarea" id="params" name="params">
<br>
<label for="isPost">Unchecked: GET, Checked: POST</label>
<br>
<input type="checkbox" id="isPost" name="isPost">
<br>
<label for="remote">Use remote node?</label>
<br>
<input type="checkbox" id="remote" name="remote">
<br>
<label for="remoteNode">Remote node ID</label>
<br>
<input type="number" id="remoteNode" name="remoteNode" max="999999">
<br>
<br>
<input id="hopperPeerID" name="hopperPeerID" type="hidden">
<button onclick="hopID();">Get</button>
<br>
<div style="background-color: var(--palette-three);" id="lilypad"></div>
{% endblock %}
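
A hedged example of the fields this form submits over the websocket; the handler in Splash/serve.py parses params with json.loads and logs an error if it is not valid JSON. The URL and all values below are invented.

```
# Invented example payload for the hopper plank (field names come from the form above)
hop_request = {
    "url": "https://example.com/api",   # hypothetical target
    "params": '{"q": "piermesh"}',      # must be a JSON object string
    "isPost": "on",                     # present only when the checkbox is ticked
    "remote": "on",                     # route the request through a remote node
    "remoteNode": "000042",             # invented node ID
    "hopperPeerID": "123456",           # filled from the hidden peer ID input
}
```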

View File

@ -1,20 +1,18 @@
<div class="plank" hx-ext="ws" ws-connect="/bubble"> {% extends "shared/plank.html" %}
<img src="/res/img/bubbledisplay.png"> {% set plankTitle = "bubble" %}
<br> {% set formID = plankTitle %}
Bubble {% block insert %}
<br> Responses: <ul id="chat_room" hx-swap="afterend">
<br> </ul>
Responses: <ul id="chat_room" hx-swap="afterend"> <br>
</ul> {% include "shared/p2chat.html" %}
<br> Peer ID:<br>
<form id="bubble" ws-send> <input name="recipientID" id="recipientID" type="number" max="999999"><br>
Peer ID:<br> Node ID:<br>
<input name="recipientID" id="recipientID" type="number" max="999999"><br> <input name="recipientNode" id="recipientNode" type="number" max="999999"><br>
Node ID:<br> Data<br> <textarea style="min-width: 200px;min-height: 200px;" type="textarea" name="chat_message"
<input name="recipientNode" id="recipientNode" type="number" max="999999"><br> id="chat_message"></textarea>
Data<br> <textarea style="min-width: 200px;min-height: 200px;" type="textarea" name="chat_message"></textarea> <br>
<br> <input type="hidden" name="bID" id="bID">
<input type="hidden" name="bID" id="bID"> <button onclick="document.getElementById('bID').value = document.getElementById('peerID').value">Send</button>
<button onclick="document.getElementById('bID').value = document.getElementById('peerID').value">Send</button> {% endblock %}
</form>
</div>

View File

@ -0,0 +1,22 @@
<div id="p2chat">
<input type="hidden" id="p2img">
<input type="hidden" id="color" value="#000000">
Background color:
<div id="rendertemplate" style="max-width: 20px;min-height:20px;background-color: #000000;"
onclick="try{splash(this);}catch(e){alert(e);}"></div>
<br>
<div id="controls">
Color picker: <input type="color" onchange="try{setColor(this);}catch(e){alert(e);}" value="#ffffff" id="picker">
Hex input: <input type="text" maxlength="6" onchange="try{setColor(this);}catch(e){alert(e);}" value="000000" />
</div>
<br>
<div id="create">
X<input type="number" min="8" max="64" placeholder="8" id="canvasX" value="8" />
Y<input type="number" min="8" max="64" placeholder="8" id="canvasY" value="8" />
<button onclick="try{cGen(this);}catch(e){alert(e);}">Generate workspace</button>
<button onclick="try{p2ToBubble();}catch(e){alert(e);}">Add to message</button>
</div>
<br>
<div id="render">
</div>
</div>

View File

@ -0,0 +1,18 @@
<div class="plank" hx-ext="ws" ws-connect="/bubble">
{% if icon is defined %}
<img src="/res/img/{{ icon }}">
{% else %}
<img src="/res/img/{{ plankTitle }}display.png">
{% endif %}
{{ plankTitle|capitalize }}<br><br>
<br>
<button onclick="toggle('{{ plankTitle }}PI');">Display/Hide</button>
<br>
<br>
<div class="plankInner" id="{{ plankTitle }}PI">
<form id="{{ formID }}" ws-send>
{% block insert %}
{% endblock %}
</form>
</div>
</div>

View File

@ -0,0 +1,184 @@
from Services.Action import Action
from Config.Context import Context
from Packets.Message import Message
import logging
logger = logging.getLogger("__main__." + __name__)
class YCTX(Context):
def __init__(
self,
packetsID,
packetCount,
pAction,
todo,
cryptographyInfo,
sourceNode,
subMessage=False,
subMessages={},
submessagesIDs=[],
eData=None,
):
super().__init__(
sourceNode=sourceNode,
packetsID=packetsID,
packetCount=packetCount,
pAction=pAction,
cryptographyInfo=cryptographyInfo,
subMessages=subMessages,
submessagesIDs=submessagesIDs,
eData=eData,
subMessage=subMessage,
)
self.todo = todo
class Yellow:
# TODO: Submessage completion actions
pActions = []
def __init__(
self,
yctx: YCTX,
):
self.yctx = yctx
self.message = None
self.submessages = yctx["subMessages"]["val"]
self.submessagesIDs = yctx["submessagesIDs"]["val"]
self.finishedSubmessages = {}
self.dataOrder = []
self.data = []
self.nonce = None
self.tag = None
self.gotHead = False
self.todo = yctx.todo
if yctx["eData"]["val"] != None:
self.dataOrder = yctx["eData"]["val"]["dataOrder"]
self.data = yctx["eData"]["val"]["data"]
def checkComplete(self):
logger.log(30, "In check complete")
for submessage in self.submessagesIDs:
submessage = str(submessage)
if submessage not in self.finishedSubmessages.keys():
logger.log(30, f"Submessage {submessage} missing")
return False
logger.log(30, "Submessages complete")
return True
async def id(self):
return self.packetsID
async def processPacket(self, p, subMessage=False, rdoaoc=[]):
doCheck = True
if subMessage == False and p["packetNumber"] != False:
#if not p["packetNumber"] in self.dataOrder:
if p["packetNumber"] not in self.dataOrder:
self.data.append(p["data"])
self.dataOrder.append(p["packetNumber"])
logger.log(10, "data")
logger.log(10, self.data)
else:
doCheck = False
elif p["packetNumber"] == False:
if not self.gotHead:
self.data.append(False)
self.dataOrder.append(False)
self.submessagesIDs = p["submessages"]
self.gotHead = True
else:
doCheck = False
else:
if not str(p["packetsID"]) in self.submessages.keys():
self.submessages[str(p["packetsID"])] = {
"data": [],
"dataOrder": []
}
if p["packetNumber"] not in self.submessages[str(p["packetsID"])]["dataOrder"]:
self.submessages[str(p["packetsID"])]["data"].append(p["data"])
self.submessages[str(p["packetsID"])]["dataOrder"].append(
p["packetNumber"])
else:
doCheck = False
if doCheck:
if subMessage != False:
if len(self.submessages[str(p["packetsID"])]["data"]) > (p["packetCount"]-1):
self.finishedSubmessages[str(p["packetsID"])] = Message.reassemble(
None, self.submessages[str(p["packetsID"])], self.yctx["cryptographyInfo"]["val"], packetCount=p["packetCount"], subMessage=True, yctx=self.yctx
)
# TODO: Trigger doact
return await self.doAct(subMessage=self.finishedSubmessages[str(p["packetsID"])])
elif len(self.data) >= (self.yctx["packetCount"]["val"]):
logger.log(
20, "Finished receiving for primary message " +
str(self.yctx["packetsID"]["val"])
)
"""
if self.yctx["wantFullResponse"]["val"] != False:
# TO DO: implement loop
# responseLoop(packets_id)
pass
"""
# self.data = Message.reassemble(None, self, self.yctx["cryptographyInfo"]["val"])
return await self.doAct(repeatDataOnActions=rdoaoc)
async def dump(self):
msg = {}
msg["packetsID"] = self.yctx["packetsID"]["val"]
msg["data"] = self.data
msg["pAction"] = self.yctx["pAction"]["val"]
msg["submessages"] = self.submessages
return msg
async def doAct(
self,
setpAction=False,
repeatDataOnActions=[],
subMessage=False
):
# TODO: Get submessage instead of primary
m = await self.dump()
if subMessage != False:
logger.log(30, "Submessage complete")
# self.submessages[m["packetsID"]] = self.yctx["crytopgraphyInfo"]["val"].decrypt(m["data"])
# TODO: Debug
logger.log(10, subMessage)
if "nonce" in subMessage.keys():
self.nonce = subMessage["nonce"]
self.tag = subMessage["tag"]
if self.checkComplete():
if len(self.data) >= (self.yctx["packetCount"]["val"]):
# TODO: Raising out of bounds error
pAction = self.pActions[int(m["pAction"])]
logger.log(30, "Full message completed")
# TODO: Decrypt
self.data = Message.reassemble(
None, self, self.yctx["cryptographyInfo"]["val"])
"""
data = self.cryptographyInfo.decrypt(m["data"])
if data == False:
self.cryptographyInfo.sessionSetup()
data = self.cryptographyInfo.decrypt(m["data"])
"""
# TODO: Go over whether we need to limit
# this from being a field in the data for
# primary messages
"""
todo.append(
Action(
"cLog", {"message": "Unknown pAction " + m["pAction"], "priority": 20}
)
)
"""
self.data["submessages"] = self.submessages
self.data["yctx"] = self.yctx
act = Action(pAction, self.data)
self.todo.append(act)
if len(repeatDataOnActions) > 0:
for caction in repeatDataOnActions:
cact = Action(caction, self.data)
#self.todo.add(cact)
self.todo.append(cact)
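
Below is a deliberately simplified, self-contained sketch of the reassembly pattern Yellow implements: deduplicate packets by packetNumber, and once packetCount packets have arrived, queue the protocol's completion action on the shared todo list. All names here are hypothetical; the real class additionally handles header packets, submessages and decryption via Message.reassemble.

```
# Simplified, standalone sketch of the Yellow reassembly pattern (hypothetical names)
class MiniYellow:
    pActions = ["sendToPeer"]            # index 0 corresponds to pAction 0

    def __init__(self, packets_id, packet_count, p_action, todo):
        self.packets_id = packets_id
        self.packet_count = packet_count
        self.p_action = p_action
        self.todo = todo                 # shared work queue, like yctx.todo
        self.data, self.data_order = [], []

    def process_packet(self, packet):
        # Deduplicate by packetNumber, like Yellow.processPacket
        if packet["packetNumber"] in self.data_order:
            return
        self.data.append(packet["data"])
        self.data_order.append(packet["packetNumber"])
        if len(self.data) >= self.packet_count:
            # All packets received: queue the protocol's completion action
            action = self.pActions[self.p_action]
            self.todo.append((action, b"".join(self.data)))

todo = []
m = MiniYellow("abc123", 2, 0, todo)
m.process_packet({"packetNumber": 0, "data": b"hel"})
m.process_packet({"packetNumber": 1, "data": b"lo"})
assert todo == [("sendToPeer", b"hello")]
```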

View File

@ -0,0 +1,10 @@
from . import hopper, map, cryptography, catch, bubble, daisy
classDict = {
"bubble": bubble.Bubble,
"catch": catch.Catch,
"cryptography": cryptography.CryptographyFilter,
"daisy": daisy.Daisy,
"hopper": hopper.Hopper,
"map": map.Map,
}
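
A hedged sketch of how this table is used by Filter.protoMap further down: the packet's class ID is zero-padded to six characters, looked up in the mlookup Daisy record to get a protocol name, and that name is resolved to a filter class here. The ID-to-name mapping below is illustrative, not the shipped daisy/mlookup contents.

```
from Sponge.Protocols import classDict

# Illustrative only; the real mapping is loaded from the daisy/mlookup record
mlookup = {"000000": "map", "000002": "bubble"}

def resolve_protocol(packets_class: int):
    proto_name = mlookup[str(packets_class).zfill(6)]  # same zfill(6) as protoMap
    return classDict[proto_name]                       # e.g. bubble.Bubble

# resolve_protocol(2) would return the Bubble filter class under this mapping
```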

View File

@ -1,19 +1,12 @@
async def filter(completeMessage, recipient, recipientNode, onodeID, todo): from Sponge.Protocols.Yellow import Yellow
# TODO: Forwarding message to next node
# TODO: Method to get next node in path to recipient node
class Bubble(Yellow):
""" """
Peer to peer protocol Peer to peer protocol
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/src/Sponge/Protocols/bubble.py>`__ `🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/src/Sponge/Protocols/bubble.py>`__
""" """
m = completeMessage pActions = ["sendToPeer"]
if recipientNode == onodeID:
todo.append(
{
"action": "sendToPeer",
"data": {"res": m["data"]["data"], "recipient": recipient},
}
)
else:
# TODO: Forwarding message to next node
# TODO: Method to get next node in path to recipient node
# self.t.addPackets(m.data, sender, senderDisplay, recipient, recipientNode)
pass

View File

@ -1,21 +1,11 @@
async def filter(completeMessage, recipient, recipientNode, todo): from Sponge.Protocols.Yellow import Yellow
class Catch(Yellow):
""" """
Catch exchange protocol Catch exchange protocol
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/catch.py>`__ `🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/catch.py>`__
""" """
m = completeMessage
# TODO: Sending to other nodes clients pActions = ["sendCatch", "routeCatch", "syncIndex"]
todo.append(
{
"action": "sendCatch",
"data": {
"toLocal": True,
"recipientNode": recipientNode,
"recipient": recipient,
"head": m["head"],
"body": m["body"],
"fins": m["fins"],
},
}
)

View File

@ -1,17 +1,11 @@
async def filter(completeMessage, recipientNode, todo): from Sponge.Protocols.Yellow import Yellow
class CryptographyFilter(Yellow):
""" """
Cryptographic operations protocol Cryptographic operations protocol
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/cryptography.py>`__ `🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/cryptography.py>`__
""" """
todo.append(
{ pActions = ["initCryptography"]
"action": "keyDeriveDH",
"data": {
"publicKey": completeMessage["data"]["publicKey"],
"params": completeMessage["data"]["params"],
"recipientNode": recipientNode,
},
}
)
# logging.log(10, "Adding cryptography request")

View File

@ -0,0 +1,9 @@
from Sponge.Protocols.Yellow import Yellow
class Daisy(Yellow):
"""
Daisy data replication protocol
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/daisy.py>`__
"""

View File

@ -0,0 +1,11 @@
from Sponge.Protocols.Yellow import Yellow
class Hopper(Yellow):
"""
Internet inter(h)op protocol
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/hopper.py>`__
"""
pActions = ["hop", "routeHop"]

View File

@ -1,22 +1,17 @@
async def filter(completeMessage, todo): from Sponge.Protocols.Yellow import Yellow
class Map(Yellow):
""" """
Network mapping protocol Network mapping protocol
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/map.py>`__ `🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/map.py>`__
""" """
m = completeMessage
todo.append( pActions = ["map", "initCryptography"]
{
"action": "map", def process(self, message, isSubMessage=False):
"data": { rdoa = []
"onodeID": m["data"]["onodeID"], if message["pAction"] == 0:
"mnodeID": m["data"]["mnodeID"], rdoa.append(self.pActions[1])
}, super().process(message, isSubMessage=isSubMessage, repeatDataOnActions=rdoa)
}
)
todo.append(
{
"action": "initNodeDH",
"data": {"mnodeID": m["data"]["mnodeID"], "onodeID": m["data"]["onodeID"]},
}
)

View File

@ -1,13 +1,14 @@
import logging
import msgpack import msgpack
import traceback import traceback
from Packets.Message import Message from Packets.Message import Message
import Sponge.Protocols.bubble import Sponge.Protocols as Protocols
import Sponge.Protocols.map
import Sponge.Protocols.catch
import Sponge.Protocols.cryptography
logger = logging.getLogger("__main__." + __name__)
class Filter: class Filter:
""" """
Packet filtering orchestration Packet filtering orchestration
@ -30,13 +31,14 @@ class Filter:
PierMesh node ID PierMesh node ID
""" """
def __init__(self, cache, onodeID, todo, cLog): def __init__(self, cache, onodeID, todo, cryptographyInfo):
self.cLog = cLog
self.cache = cache self.cache = cache
self.cryptographyInfo = cryptographyInfo
""" """
Messages is temporary storage for unfinished messages Messages is temporary storage for unfinished messages
""" """
self.messages = {} self.messages = {}
self.submessages = {}
self.completed = [] self.completed = []
self.todo = todo self.todo = todo
self.onodeID = onodeID self.onodeID = onodeID
@ -49,7 +51,7 @@ class Filter:
msgpack.loads(payload) msgpack.loads(payload)
return True return True
except Exception as e: except Exception as e:
self.cLog(20, "Not msgpack encoded, skipping") logger.debug("Not msgpack encoded, skipping")
return False return False
def selfCheck(self, packet): def selfCheck(self, packet):
@ -57,93 +59,128 @@ class Filter:
Check if this is a self packet, if so skip Check if this is a self packet, if so skip
""" """
if packet["fromId"] == packet["toId"]: if packet["fromId"] == packet["toId"]:
self.cLog(20, "Self packet, ignored") logger.log(20, "Self packet, ignored")
return False return False
else: else:
return True return True
async def protoMap(self, protocolID: int): async def protoMap(self, protocolID: int, packetsID, packetCount, sourceNode, submessagesIDs=[], pAction=None):
""" """
Get protocol from protocol ID using the mlookup table Get protocol from protocol ID using the mlookup table
""" """
protocolID = str(protocolID).zfill(6) protocolID = str(protocolID).zfill(6)
return self.cache.get("mlookup").get()[protocolID] proto = self.cache.get("mlookup").get()[protocolID]
subMessages = {}
for pid in submessagesIDs:
if pid in self.submessages:
subMessages[pid] = self.submessages[pid]
ctx = Protocols.Yellow.YCTX(
packetsID,
packetCount,
pAction,
self.todo,
self.cryptographyInfo,
sourceNode,
submessagesIDs=submessagesIDs,
subMessages=subMessages
)
cf = Protocols.classDict[proto](
ctx
)
return cf
async def protoRoute(self, completeMessage: dict): # async def protoRoute(self, completeMessage: dict):
""" # """
Route message to proper protocol handler # Route message to proper protocol handler
""" # """
m = completeMessage # m = completeMessage
""" # """
Shorthand reference completeMessage for ease # Shorthand reference completeMessage for ease
""" # """
sender = m["sender"] # sender = m["sender"]
senderDisplay = m["senderDisplayName"] # senderDisplay = m["senderDisplayName"]
recipient = m["recipient"] # recipient = m["recipient"]
recipientNode = m["recipientNode"] # recipientNode = m["recipientNode"]
# TODO: Fix packets to use class # protocol = await self.protoMap(m["packetsClass"])
protocol = await self.protoMap(m["packetsClass"]) # logger.log(20, "Protocol: " + protocol)
self.cLog(20, "Protocol: " + protocol) # if protocol == "bubble":
if protocol == "bubble": # await Sponge.Protocols.bubble.filter(
await Sponge.Protocols.bubble.filter( # m, recipient, recipientNode, self.onodeID, self.todo
m, recipient, recipientNode, self.onodeID, self.todo # )
) # elif protocol == "map":
elif protocol == "map": # await Sponge.Protocols.map.filter(m, self.todo)
await Sponge.Protocols.map.filter(m, self.todo) # elif protocol == "catch":
elif protocol == "catch": # await Sponge.Protocols.catch.filter(m, recipient, recipientNode, self.todo)
await Sponge.Protocols.catch.filter(m, recipient, recipientNode, self.todo) # elif protocol == "cryptography":
elif protocol == "cryptography": # await Sponge.Protocols.cryptography.filter(
await Sponge.Protocols.cryptography.filter( # completeMessage, recipientNode, self.todo
completeMessage, recipientNode, self.todo # )
) # elif protocol == "hopper":
else: # await Sponge.Protocols.hopper.filter(
self.cLog(30, "Cant route, no protocol") # completeMessage, self.todo, recipient, recipientNode
# )
# else:
# logger.log(30, "Cant route, no protocol")
async def sieve(self, packet): async def sieve(self, packet):
""" """
Base filtering logic, takes a single MeshTastic packet Base filtering logic, takes a single MeshTastic packet
""" """
# TODO: Instantiating the protocol on submessage
p = packet["decoded"]["payload"] p = packet["decoded"]["payload"]
if self.selfCheck(packet) and self.mCheck(p): if self.selfCheck(packet) and self.mCheck(p):
try: try:
p = msgpack.loads(p) p = msgpack.loads(p)
self.cLog(20, p) logger.log(20, p)
packetsID = p["packetsID"] packetsID = p["packetsID"]
packetsClass = p["packetsClass"] packetsClass = p["packetsClass"]
if packetsID == 0: if packetsID == 0:
self.cLog(20, "Single packet") logger.log(20, "Single packet")
# Do sp handling # Do sp handling
pass pass
if packetsID in self.completed: if packetsID in self.completed:
raise ValueError("Message already completed") raise ValueError("Message already completed")
if not (packetsID in self.messages): if not (packetsID in self.messages) and (
self.messages[packetsID] = { not "wantFullResponse" in p.keys()
"packetCount": p["packetCount"], ):
"data": [], if "primaryMessage" in p.keys():
"finished": False, if p["primaryMessage"] in self.messages.keys():
"dataOrder": [], # TODO: Temporary store for submessages if main hasnt arrived
} # TODO: Check for submessages when instantiating
if "wantFullResponse" in p.keys(): await self.messages[p["primaryMessage"]].processPacket(p, subMessage=True)
for k in p.keys(): else:
if k != "data": if not str(p["packetsID"]) in self.submessages.keys():
self.messages[packetsID][k] = p[k] self.submessages[str(p["packetsID"])] = {
elif not p["packetNumber"] in self.messages[packetsID]["dataOrder"]: "data": [],
self.messages[packetsID]["data"].append(p["data"]) "dataOrder": []
self.messages[packetsID]["dataOrder"].append(p["packetNumber"]) }
if (len(self.messages[packetsID]["data"])) >= ( self.submessages[str(p["packetsID"])]["data"].append(p["data"])
self.messages[packetsID]["packetCount"] - 1 self.submessages[str(p["packetsID"])]["dataOrder"].append(p["packetNumber"])
) and ("wantFullResponse" in self.messages[packetsID].keys()): #await self.messages[p["primaryMessage"]].processPacket(p, subMessage=True)
self.cLog(20, "Finished receiving for message " + str(packetsID))
self.messages[packetsID]["finished"] = True else:
if self.messages[packetsID]["wantFullResponse"] != False: self.messages[packetsID] = await self.protoMap(
# TO DO: implement loop p["packetsClass"], packetsID, p["packetCount"], 0
# responseLoop(packets_id) )
pass await self.messages[packetsID].processPacket(p)
completeMessage = self.messages[packetsID] elif "wantFullResponse" in p.keys():
completeMessage["data"] = Message.reassemble(None, completeMessage) if packetsID in self.messages.keys():
del self.messages[packetsID] self.messages[packetsID].yctx["pAction"]["val"] = p["pAction"]
self.completed.append(packetsID) self.messages[packetsID].yctx["sourceNode"]["val"] = p["sourceNode"]
self.cLog(20, "Assembly completed, routing") # self.messages[packetsID].submessageIDs = p["submessages"]
await self.protoRoute(completeMessage) await self.messages[packetsID].processPacket(p)
else:
self.messages[packetsID] = await self.protoMap(
p["packetsClass"],
packetsID,
p["packetCount"],
p["sourceNode"],
submessagesIDs=p["submessages"],
pAction=p["pAction"]
)
await self.messages[packetsID].processPacket(p)
else:
await self.messages[packetsID].processPacket(p)
except Exception: except Exception:
self.cLog(30, traceback.format_exc()) logger.log(30, traceback.format_exc())
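
For orientation, a hedged sketch of the payload shapes sieve distinguishes between. The field names are taken from the code above (packetsID, packetsClass, packetCount, packetNumber, wantFullResponse, sourceNode, pAction, submessages, primaryMessage, data); every concrete value below is invented.

```
import msgpack

# Header packet of a primary message (identified by "wantFullResponse" being present)
header = {
    "packetsID": 123456,       # shared by every packet of this message
    "packetsClass": 2,         # protocol class, resolved via protoMap/mlookup
    "packetCount": 2,          # total packets in the primary message
    "packetNumber": False,     # False marks the header packet
    "wantFullResponse": False,
    "sourceNode": "000001",
    "pAction": 0,              # index into the protocol's pActions list
    "submessages": [],         # packetsIDs of companion SubMessages (e.g. the nonce)
}

# Body packet carrying one chunk of the message data
body = {
    "packetsID": 123456,
    "packetsClass": 2,
    "packetCount": 2,
    "packetNumber": 1,
    "data": b"...",
}

# A submessage packet instead carries "primaryMessage": <packetsID of its parent>
payload = msgpack.dumps(header)   # arrives as packet["decoded"]["payload"]
```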

View File

@ -1,13 +1,25 @@
import meshtastic import meshtastic
import meshtastic.serial_interface import meshtastic.serial_interface
from pubsub import pub from pubsub import pub
from Packets.Message import Message from Packets.Message import Message
from Packets.Messages.Protocols.map.Announce import AnnounceMessage
from Packets.SubMessage import SubMessage
import time import time
import sys
import random
import msgpack import msgpack
import asyncio import asyncio
import uuid
import traceback
import logging
logger = logging.getLogger("__main__." + __name__)
class Transceiver: class Transceiver:
""" """
@ -50,70 +62,75 @@ class Transceiver:
""" """
def __init__(self, device, filter, onodeID, cache, catch, cryptographyInfo, cLog): def __init__(self, device, filter, onodeID, cache, catch, cryptographyInfo, network):
self.cLog = cLog try:
self.cryptographyInfo = cryptographyInfo self.busy = False
self.filter = filter self.network = network
self.tcache = cache self.cryptographyInfo = cryptographyInfo
self.tcatch = catch self.filter = filter
self.notConnected = True self.tcache = cache
self.messages = {} self.tcatch = catch
self.acks = {} self.notConnected = True
self.onodeID = onodeID self.messages = {}
# Be careful with this self.acks = {}
self.cpid = 0 self.onodeID = onodeID
self.tasks = {} # Be careful with this
# TODO: use node id to deliver directly self.cpid = 0
pub.subscribe(self.onReceive, "meshtastic.receive") self.tasks = {}
pub.subscribe(self.onConnection, "meshtastic.connection.established") pub.subscribe(self.onReceive, "meshtastic.receive")
self.interface = meshtastic.serial_interface.SerialInterface(device) pub.subscribe(self.onConnection, "meshtastic.connection.established")
i = 0 self.interface = meshtastic.serial_interface.SerialInterface(device)
while self.notConnected: i = 0
if i % 5000000 == 0: while self.notConnected:
self.cLog(20, "Waiting for node initialization...") if i % 5000000 == 0:
i += 1 logger.log(20, "Waiting for node initialization...")
self.cLog(20, "Initialized") i += 1
logger.log(20, "Initialized")
except Exception as e:
logger.log(30, traceback.format_exc())
raise e
# TODO: Sending packets across multiple nodes/load balancing/distributed packet transmission/reception # TODO: Sending packets across multiple nodes/load balancing/distributed packet transmission/reception
def onReceive(self, packet, interface): def onReceive(self, packet, interface):
""" """
Run each received packet through Sponge.base.Filters sieve using a new event loop Run each received packet through Sponge.base.Filters sieve using a new event loop
""" """
self.busy = True
asyncio.new_event_loop().run_until_complete(self.filter.sieve(packet)) asyncio.new_event_loop().run_until_complete(self.filter.sieve(packet))
self.busy = False
self.tcache.refresh() self.tcache.refresh()
async def sendAnnounce(self): async def sendAnnounce(self, dontRespond=False):
""" """
Send an announce packet (contains basic network mapping information) every so often so new nodes autoconnect Send an announce packet (contains basic network mapping information) every so often so new nodes autoconnect
""" """
await self.addPackets( try:
msgpack.dumps( r = AnnounceMessage(
{ self.onodeID,
"onodeID": self.onodeID, 0,
"mnodeID": self.interface.localNode.nodeNum, self.onodeID,
} self.cryptographyInfo,
), {"onodeID": self.onodeID, "mnodeID": self.interface.localNode.nodeNum, "dontRespond": dontRespond},
self.onodeID, )
None, await self.sendMessage(r)
True, except:
None, logger.log(30, traceback.format_exc())
packetsClass=0,
)
def onConnection(self, interface, topic=pub.AUTO_TOPIC): def onConnection(self, interface, topic=pub.AUTO_TOPIC):
""" """
When the node connects start announce loop and end the waiting loop When the node connects start announce loop and end the waiting loop
""" """
asyncio.run(self.sendAnnounce()) #asyncio.run(self.sendAnnounce())
self.notConnected = False self.notConnected = False
def responseCheck(self, packet): def responseCheck(self, packet):
""" """
On acknowledgement response set acks based on response On acknowledgement response set acks based on response
TODO: Stop this being sent to sieve
""" """
rid = packet["decoded"]["requestId"] rid = packet["decoded"]["requestId"]
if packet["decoded"]["routing"]["errorReason"] == "MAX_RETRANSMIT": if packet["decoded"]["routing"]["errorReason"] == "MAX_RETRANSMIT":
self.cLog(20, "Got ack error") logger.debug(f"Got ack error for packet {rid}")
self.acks[str(rid)] = False self.acks[str(rid)] = False
else: else:
self.acks[str(rid)] = True self.acks[str(rid)] = True
@ -185,6 +202,7 @@ class Transceiver:
break break
return True return True
# TODO: Deprecate
async def addPackets( async def addPackets(
self, self,
data, data,
@ -234,12 +252,13 @@ class Transceiver:
if recipientNode == None: if recipientNode == None:
self.send(p) self.send(p)
else: else:
self.cLog(10, "Sending target: " + str(directID)) logger.log(10, "Sending target: " + str(directID))
if directID != False: if directID != False:
recipientNode = directID recipientNode = directID
self.send(p, recipientNode=recipientNode) self.send(p, recipientNode=recipientNode)
awaitTask = asyncio.create_task(self.awaitResponse(self.cpid)) awaitTask = asyncio.create_task(self.awaitResponse(self.cpid))
await asyncio.sleep(1) await asyncio.sleep(1)
# TODO: pid fix
currentTask = { currentTask = {
"ob": awaitTask, "ob": awaitTask,
"pid": str(self.cpid), "pid": str(self.cpid),
@ -248,13 +267,79 @@ class Transceiver:
} }
self.tasks[str(self.cpid)] = currentTask self.tasks[str(self.cpid)] = currentTask
async def sendMessage(self, message: Message):
try:
# self.busy = True
# TODO: Send nonce subMessage after main message
# TODO: Instantiate SubMessages with HeaderPacket of primary message
hpacket = message.fullPackets[0]
logger.log(30, message.nonce)
nsm = SubMessage(
hpacket.sender,
hpacket.senderDisplayName,
hpacket.sourceNode,
hpacket.recipient,
hpacket.recipientNode,
self.cryptographyInfo,
hpacket.packetsClass,
hpacket.pAction,
{"nonce": message.nonce, "tag": message.tag},
target=message.target,
primaryMessage=hpacket.packetsID
)
message.fullPackets[0].submessages.append(nsm.packetsID)
message.packets[0] = message.fullPackets[0].dump()
logger.log(30, message.packets[0])
logger.log(30, nsm.packets)
for m in [{"primary": message}, {"nonce": nsm}]:
while self.busy:
await asyncio.sleep(2)
m = [v for v in m.values()][0]
# logger.log(30, m)
await asyncio.sleep(5)
isHead = True
curPacketCount = len(m.packets)
for it, p in enumerate(m.packets):
it += 1
pid = str(uuid.uuid4())
#print(p)
logger.info(f"Packet {it:03d}/{curPacketCount:03d}, Size is: " + str(sys.getsizeof(p)))
doNumber = 3
if isHead:
doNumber = 5
isHead = False
if m.target == False:
logger.log(10, "Sending any")
for i in range(doNumber):
self.send(p)
await asyncio.sleep(1)
else:
logger.log(10, "Sending target: " + str(m.recipientNode))
for i in range(doNumber):
self.send(p, recipientNode=self.network.doLookup(m.recipientNode))
await asyncio.sleep(1)
awaitTask = asyncio.create_task(self.awaitResponse(pid))
currentTask = {
"ob": awaitTask,
"pid": pid,
"packet": p,
"retry": False,
}
self.tasks[pid] = currentTask
await asyncio.sleep(3)
except Exception:
logger.log(30, traceback.format_exc())
self.busy = False
async def progressCheck(self): async def progressCheck(self):
""" """
Checks if acknowledgement was received per packet and if not resends Checks if acknowledgement was received per packet and if not resends
""" """
while True: while True:
await asyncio.sleep(90) await asyncio.sleep(480)
self.cLog( logger.log(
20, "Checking progress of {0} tasks".format(len(self.tasks.keys())) 20, "Checking progress of {0} tasks".format(len(self.tasks.keys()))
) )
doneFlag = True doneFlag = True
@ -271,12 +356,12 @@ class Transceiver:
elif retry < 3: elif retry < 3:
retry += 1 retry += 1
else: else:
self.cLog(30, "Too many retries") logger.log(30, "Too many retries")
remove = True remove = True
if remove: if remove:
del self.tasks[task["pid"]] del self.tasks[task["pid"]]
else: else:
self.cLog(20, "Doing retry") logger.log(20, "Doing retry")
doneFlag = False doneFlag = False
# TODO: Resend to specific node # TODO: Resend to specific node
self.send(task["packet"]) self.send(task["packet"])
@ -299,7 +384,17 @@ class Transceiver:
""" """
Announce loop runner Announce loop runner
""" """
while (self.notConnected or self.busy):
if self.busy:
logger.log(30, "Transceiver is busy, waiting")
await asyncio.sleep(2)
while True: while True:
self.cLog(10, "Announce") wait = random.randrange(600, 900)
logger.info(f"Waiting {wait} seconds for next announce")
await asyncio.sleep(wait)
while self.busy:
logger.info("Transceiver is busy, waiting")
await asyncio.sleep(2)
await asyncio.sleep(5)
logger.info("Announce")
await self.sendAnnounce() await self.sendAnnounce()
await asyncio.sleep(180)

2
src/debug Executable file
View File

@ -0,0 +1,2 @@
pylint --errors-only --disable=C,R run.py > tmp.debug
nano tmp.debug

View File

@ -3,44 +3,136 @@ from Sponge.base import Filter
from Siph.map import Network from Siph.map import Network
from Daisy.Catch import Catch from Daisy.Catch import Catch
from Daisy.Cache import Cache from Daisy.Cache import Cache
from Daisy.Index import Index
from Daisy.CryptographyUtil import SteelPetal
from Splash.serve import Server from Splash.serve import Server
from Transceiver.Transceiver import Transceiver from Transceiver.Transceiver import Transceiver
from Cryptography.WhaleSong import DHEFern from Cryptography.WhaleSong import Transport
from ui import TUI
import Components.hopper as hopper
from Packets.Messages.Protocols.hopper.Response import HopperResponse
from Packets.Messages.Protocols.catch.Response import CatchResponse
from Packets.Messages.Protocols.cryptography.Handshake import Handshake
import tlog
from tlog import VHandler
# Generic imports # Generic imports
import logging import logging
import os import os
import asyncio import asyncio
import sys
import time import time
import datetime import datetime
import traceback import traceback
import threading import threading
import random import random
import argparse
import configparser
import shutil
import queue
import json
# Process management library # Process management library
import psutil import psutil
if __name__ == "__main__": import uvloop
global nodeOb, tuiOb
"""
Global objects for the PierMesh service and the TUI so we can terminate the associated processes later
"""
nodeOb = None
tuiOb = None
# Pull startup parameters
device, webPort, serverInfoFile, delay, nodeNickname = sys.argv[1:]
# Set up file based logging # TODO: Switch config to toml
logPath = "logs" # TODO: Better protocol management
fileName = datetime.datetime.now().strftime("%m%d%Y_%H%M%S") # TODO: Dry run
logFormat = "%(asctime)s [%(threadName)-12.12s] [%(levelname)-5.5s] %(message)s"
logging.basicConfig( if __name__ == "__main__":
format=logFormat, """
level=logging.INFO, Global objects for the PierMesh service and the TUI so we can terminate the associated processes later and configuration objects for the command line and config (.piermesh)
filename="{0}/{2}_{1}.log".format(logPath, fileName, nodeNickname), """
logger = ""
passkey = input("Enter node decryption key: ")
#psk = input("Enter psk: ")
nodeOb = None
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--device", help="Set transceiver device path")
parser.add_argument("-p", "--port", help="Web UI server port")
parser.add_argument("-n", "--nickname", help="Node nickname")
parser.add_argument(
"-s", "--startupDelay", help="Startup delay (useful for testing)"
) )
parser.add_argument(
"-o", "--override", help="Whether to override config", default=False
)
parser.add_argument("-x", "--showTUI",
help="Whether to show TUI", default=True)
parser.add_argument(
"-l",
"--confList",
help="Comma separated list of conf files to load for multi-node mode (TODO)",
default=False,
)
parser.add_argument(
"-k",
"--PSK",
help="Pre shared key for initializing communications",
default=False,
)
argConfig = parser.parse_args()
config = configparser.ConfigParser()
if argConfig.confList != False:
pass
if not os.path.exists(".piermesh"):
print("No config at .piermesh, duplicating and loading example config...")
shutil.copyfile(".piermesh.example", ".piermesh")
config.read(".piermesh")
device = ""
if "transceiverPort" in config["OPERATOR_REQUIRED"]:
if argConfig.override:
device = argConfig.device
else:
device = config["OPERATOR_REQUIRED"]["transceiverPort"]
else:
if argConfig.device == False:
print("No device set exiting...")
exit(0)
else:
device = argConfig.device
webPort = config["DEFAULT"]["WebUIPort"]
if argConfig.override:
webPort = argConfig.port
else:
if "WebUIPort" in config["OPERATOR_OVERRIDES"]:
webPort = config["OPERATOR_OVERRIDES"]["WebUIPort"]
webPort = int(webPort)
delay = config["DEFAULT"]["StartupDelay"]
if argConfig.override:
delay = argConfig.startupDelay
else:
if "StartupDelay" in config["OPERATOR_OVERRIDES"]:
delay = config["OPERATOR_OVERRIDES"]["StartupDelay"]
delay = int(delay)
nodeNickname = config["DEFAULT"]["Nickname"]
if argConfig.override:
nodeNickname = argConfig.nickname
else:
if "Nickname" in config["OPERATOR_OVERRIDES"]:
nodeNickname = config["OPERATOR_OVERRIDES"]["Nickname"]
showTUI = config["DEFAULT"]["ShowTUI"]
if argConfig.override:
showTUI = argConfig.showTUI
else:
if "ShowTUI" in config["OPERATOR_OVERRIDES"]:
showTUI = config["OPERATOR_OVERRIDES"]["ShowTUI"]
showTUI = bool(showTUI)
psk = False
if argConfig.override:
if argConfig.PSK == False:
print("No PSK set quitting...")
exit(0)
else:
psk = argConfig.PSK
#else:
if "PSK" in config["OPERATOR_REQUIRED"]:
psk = config["OPERATOR_REQUIRED"]["PSK"]
print("Staggering {0} seconds please wait".format(delay))
time.sleep(delay)
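
The block above repeats the same precedence for each setting: a command line value wins only when --override is given, otherwise [OPERATOR_OVERRIDES] beats [DEFAULT]. A hedged helper capturing that pattern (hypothetical function, not part of run.py):

```
def resolve_setting(config, key, cli_value=None, override=False, fallback=None):
    """Mirror of the precedence used above:
    CLI value when --override is set, else [OPERATOR_OVERRIDES], else [DEFAULT]."""
    if override and cli_value not in (None, False):
        return cli_value
    if key in config["OPERATOR_OVERRIDES"]:
        return config["OPERATOR_OVERRIDES"][key]
    return config["DEFAULT"].get(key, fallback)

# e.g. webPort = int(resolve_setting(config, "WebUIPort",
#                                    cli_value=argConfig.port,
#                                    override=argConfig.override))
```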
class Node: class Node:
@ -93,37 +185,80 @@ class Node:
""" """
def __init__(self): def __init__(self):
self.toLog = []
actionsList = [f for f in dir(self) if "action" in f] actionsList = [f for f in dir(self) if "action" in f]
self.actions = {} self.actions = {}
for a in actionsList: for a in actionsList:
self.actions[a.split("_")[1]] = getattr(self, a) self.actions[a.split("_")[1]] = getattr(self, a)
self.todo = [] self.todo = []
self.cLog(20, "Past action mapping") logger.log(20, "Past action mapping")
self.network = Network() self.network = Network()
self.catch = Catch(walk=True) self.daisyCryptography = SteelPetal(passkey)
self.cache = Cache(walk=True) # TODO: Better directory structure
self.catch = Catch(self.daisyCryptography, walk=True, path=nodeNickname)
self.cache = Cache(self.daisyCryptography, walk=True)
self.cache.get("mlookup").json_to_msg("daisy/mlookup")
logger.info("Protocols:\n" + json.dumps(self.cache.get("mlookup").get(), indent=4))
self.remoteCatchIndex = Index(nodeNickname, self.daisyCryptography)
serverInfoFile = nodeNickname + ".info"
self.nodeInfo = self.cache.get(serverInfoFile) self.nodeInfo = self.cache.get(serverInfoFile)
if self.nodeInfo == False: if self.nodeInfo == False:
self.cache.create(serverInfoFile, {"nodeID": random.randrange(0, 1000000)}) # TODO: Get/set toml
self.cache.create(
serverInfoFile, {"nodeID": random.randrange(0, 1000000)})
self.nodeInfo = self.cache.get(serverInfoFile) self.nodeInfo = self.cache.get(serverInfoFile)
self.network.addin(self.nodeInfo.get()["nodeID"]) self.network.addin(self.nodeInfo.get()["nodeID"])
self.cLog(20, "Siph network stack initialized") logger.log(20, "Siph network stack initialized")
self.onodeID = str(self.nodeInfo.get()["nodeID"]) self.onodeID = str(self.nodeInfo.get()["nodeID"])
self.server = None
self.sponge = Filter(self.cache, self.onodeID, self.todo, self.cLog) logger.log(20, "Cryptography initializing")
self.cLog(20, "Filter initialized") self.cryptographyInfo = Transport(
self.cLog(10, "Command line arguments: " + ", ".join(sys.argv)) self.cache, nodeNickname, self.daisyCryptography, psk
self.oTransceiver = None )
self.cLog(20, "Cryptography initializing") logger.log(20, "Cryptography initialized")
self.cryptographyInfo = DHEFern(self.cache, nodeNickname, self.cLog)
self.cLog(20, "Cryptography initialized") self.sponge = Filter(self.cache, self.onodeID,
self.todo, self.cryptographyInfo)
logger.log(20, "Filter initialized")
# TODO: Run in different thread, dispatch through queue
self.oTransceiver = Transceiver(
device,
self.sponge,
self.onodeID,
self.cache,
self.catch,
self.cryptographyInfo,
self.network
)
self.server = Server(
self.oTransceiver,
self.catch,
self.onodeID,
self.network,
self.cryptographyInfo,
self.remoteCatchIndex,
self.cache,
)
self.processed = [] self.processed = []
self.proc = psutil.Process(os.getpid()) self.proc = psutil.Process(os.getpid())
self.mTasks = {} self.mTasks = {}
async def main(self):
self.mTasks["list"] = asyncio.create_task(self.spongeListen())
await asyncio.sleep(1)
# self.mTasks["pct"] = asyncio.create_task(self.oTransceiver.progressCheck())
# await asyncio.sleep(1)
self.mTasks["mon"] = asyncio.create_task(self.monitor())
await asyncio.sleep(1)
self.mTasks["announce"] = asyncio.create_task(
self.oTransceiver.announce())
await asyncio.sleep(1)
await self.server.app.start_server(port=int(webPort))
def cLog(self, priority: int, message: str): def cLog(self, priority: int, message: str):
logger = logging.getLogger(__name__)
""" """
Convenience function that logs to the ui and log files Convenience function that logs to the ui and log files
@ -140,8 +275,18 @@ class Node:
------- -------
None None
""" """
logging.log(priority, message)
self.toLog.append("[{0}]:\n{1}".format(datetime.datetime.now(), message)) logger.log(priority, message)
# pass
# logging.log(priority, message)
# self.toLog.append("[{0}]:\n{1}".format(datetime.datetime.now(), message))
async def fsInit(self):
# TODO: Flesh out and properly link everything
if not os.path.exists("data"):
os.makedirs("data")
if not os.path.exists("data/" + nodeNickname):
os.makedirs("data/" + nodeNickname)
async def monitor(self): async def monitor(self):
global tuiOb global tuiOb
@ -154,8 +299,8 @@ class Node:
memmb = self.proc.memory_info().rss / (1024 * 1024) memmb = self.proc.memory_info().rss / (1024 * 1024)
memmb = round(memmb, 2) memmb = round(memmb, 2)
cpup = self.proc.cpu_percent(interval=1) cpup = self.proc.cpu_percent(interval=1)
self.cLog( logger.log(
20, 10,
" MEM: {0} mB | CPU: {1} %".format( " MEM: {0} mB | CPU: {1} %".format(
memmb, memmb,
cpup, cpup,
@ -164,7 +309,7 @@ class Node:
tuiOb.do_set_cpu_percent(float(cpup)) tuiOb.do_set_cpu_percent(float(cpup))
tuiOb.do_set_mem(memmb) tuiOb.do_set_mem(memmb)
else: else:
self.cLog(20, "No TUI object, waiting 5 seconds...") logger.log(20, "No TUI object, waiting 5 seconds...")
await asyncio.sleep(5) await asyncio.sleep(5)
async def spongeListen(self): async def spongeListen(self):
@ -180,12 +325,21 @@ class Node:
We use a common technique here that calls the function from our preloaded actions via dictionary entry We use a common technique here that calls the function from our preloaded actions via dictionary entry
""" """
while True: while True:
while len(self.todo) >= 1: # logger.log(10, "Sponge listening...")
todoNow = self.todo.pop() # logger.log(10, self.todo)
action = todoNow["action"] while (len(self.todo) >= 1):
self.cLog(20, "Action: " + action) try:
data = todoNow["data"] logger.log(10, "In todo")
await self.actions[action](data) todoNow = self.todo[0]
action = todoNow.getAction()
logger.log(20, "Action: " + action)
data = todoNow.getData()
logger.log(20, "Doing action")
await self.actions[action](data)
logger.log(20, "After action")
except Exception:
logger.log(20, traceback.format_exc())
self.todo.pop()
await asyncio.sleep(1) await asyncio.sleep(1)
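The loop above drains `self.todo` and dispatches each task by looking its handler up in the preloaded `self.actions` dictionary. A minimal, self-contained sketch of that dispatch pattern (a hypothetical `MiniNode`, using plain dicts instead of PierMesh's task objects) might look like:

```python
import asyncio

class MiniNode:
    def __init__(self):
        # Handlers are registered once by name; the loop only does a dict lookup
        self.actions = {"map": self.action_map}
        self.todo = []

    async def action_map(self, data: dict):
        print("mapping node", data["onodeID"])

    async def listen(self):
        # Pop queued tasks in order and dispatch each to its matching handler
        while self.todo:
            task = self.todo.pop(0)
            await self.actions[task["action"]](task["data"])

node = MiniNode()
node.todo.append({"action": "map", "data": {"onodeID": 123456}})
asyncio.run(node.listen())
```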
async def action_sendToPeer(self, data: dict): async def action_sendToPeer(self, data: dict):
@ -205,28 +359,71 @@ class Node:
webui.serve.Server.sendToPeer: Function to actually execute the action webui.serve.Server.sendToPeer: Function to actually execute the action
""" """
self.server.sendToPeer(data["recipient"], data["res"]) logger.info(data)
await self.server.sendToPeer(data["recipient"], data["data"], data["target"])
async def action_sendCatch(self, data: dict): async def action_sendCatch(self, data: dict):
""" """
Get catch and return the data to a peer Get catch and return the data to a peer
""" """
res = self.catch.get(data["head"], data["body"], fins=data["fins"]) # TODO: Separators
self.server.sendToPeer(data["recipient"], res) if "fins" in data:
res = self.catch.get(data["head"], data["body"], fins=data["fins"])
else:
res = self.catch.get(data["head"], data["body"])
logger.info(res)
# TODO: Manually connect two nodes
r = CatchResponse(
self.onodeID,
000000,
self.onodeID,
data["recipient"],
data["recipientNode"],
self.cryptographyInfo,
res,
pskEncrypt=True
)
await self.oTransceiver.sendMessage(r)
async def action_cLog(self, data: dict):
logger.log(data["priority"], data["message"])
async def action_routeCatch(self, data: dict):
await self.server.sendToPeer(data["recipient"], data["html"], data["target"])
async def action_syncIndex(self, data: dict):
for entry in data["index"]:
self.remoteCatchIndex.addEntry(entry)
async def action_map(self, data: dict): async def action_map(self, data: dict):
# TODO: Normalize node ids to strings
""" """
Map new network data to internal network map Map new network data to internal network map
See Also See Also
-------- --------
Siph.network.Network: Layered graph etwork representation Siph.network.Network: Layered graph network representation
""" """
self.network.addLookup(data["onodeID"], data["mnodeID"]) if self.cryptographyInfo.getRecord("key", data["onodeID"]) == False:
self.cLog(20, "Lookup addition done") logger.log(20, "In map")
self.network.addon(data["onodeID"]) await self.network.addLookup(data["onodeID"], data["mnodeID"])
logger.log(20, "After add lookup")
logger.log(20, "Adding public key")
self.cryptographyInfo.addPublickey(data["onodeID"], data["publicKey"])
logger.log(20, "Public key addition done")
self.network.omap.add_node(data["onodeID"])
logger.log(20, "Added node to outer map")
else:
logger.log(20, "Node already exists skipping addition to map")
if "sessionKey" not in self.cryptographyInfo.getRecord("key", data["onodeID"]).keys():
if not data["dontRespond"]:
#await self.oTransceiver.sendAnnounce(dontRespond=True)
#await asyncio.sleep(180)
h = Handshake(self.nodeInfo.get()["nodeID"], self.nodeInfo.get()["nodeID"], data["onodeID"], data["onodeID"], self.cryptographyInfo, data["onodeID"], self.onodeID)
await self.oTransceiver.sendMessage(h)
async def action_initNodeDH(self, data: dict):
async def action_initCryptography(self, data: dict):
""" """
Initialize diffie hellman key exchange Initialize diffie hellman key exchange
@ -234,101 +431,92 @@ class Node:
-------- --------
Cryptography.DHEFern.DHEFern: End to end encryption functionality Cryptography.DHEFern.DHEFern: End to end encryption functionality
""" """
if self.cryptographyInfo.getRecord("key", data["onodeID"]) == False: # TODO: Response message
await self.oTransceiver.initNodeDH( # TODO: Initialize if not initialized
self.cryptographyInfo, int(data["mnodeID"]), data["onodeID"] self.cryptographyInfo.sessionSetup(data["yctx"]["sourceNode"]["val"], data["ephemeralKey"])
) logger.log(20, "Cryptography handshake complete")
# TODO: Now encrypt all communications node to node with session encryption
# TODO: Expiration?
async def action_keyDeriveDH(self, data: dict): async def action_hop(self, data):
"""
Derive key via diffie hellman key exchange
"""
try: try:
self.cryptographyInfo.keyDerive( r = None
data["publicKey"], if data["method"] == "get":
self.cryptographyInfo.getSalt(), r = hopper.get(
data["recipientNode"], data["url"],
data["params"], params=data["parameters"],
) followTags=["img", "script", "link"],
)
elif data["method"] == "post":
r = hopper.post(data["url"], params=data["parameters"])
if r != None:
r = HopperResponse(
self.onodeID,
000000,
self.onodeID,
data["recipient"],
data["recipientNode"],
r,
self.cryptographyInfo,
)
await self.oTransceiver.sendMessage(r)
except: except:
self.cLog(30, traceback.format_exc()) logger.log(30, traceback.format_exc())
async def action_routeHop(self, data: dict):
await self.server.sendToPeer(data["recipient"], data["res"], data["target"])
async def logPassLoop(): async def action_addPSK(self, data):
""" # TODO: Switch to credential
Loop to pass logs up to the TUI self.cryptographyInfo.createEmpty(data["nodeID"])
self.cryptographyInfo.update(data["nodeID"], {"PSK": data["PSK"]})
See Also
--------
ui.TUI: TUI implementation
"""
global tuiOb, nodeOb
while True:
await asyncio.sleep(1)
if tuiOb == None or nodeOb == None:
await asyncio.sleep(1)
elif tuiOb.done:
tuiOb.exit()
os.system("reset")
print("Terminating PierMesh service...")
nodeOb.proc.terminate()
else:
ctoLog = [l for l in nodeOb.toLog]
for l in ctoLog:
tuiOb.do_write_line(l)
nodeOb.toLog.pop()
async def main(): async def main():
""" """
Main method for running the PierMesh service Kick-off main method for running the PierMesh service
""" """
global nodeOb
try: try:
nodeOb = Node() nodeOb = Node()
nodeOb.cLog(20, "Starting up") await nodeOb.main()
nodeOb.cLog(20, "Staggering {0} seconds, please wait".format(sys.argv[4])) except:
time.sleep(int(sys.argv[4])) logger.log(20, traceback.format_exc())
nodeOb.oTransceiver = Transceiver(
sys.argv[1],
nodeOb.sponge, def initLogger(nodeName, tolog):
nodeOb.onodeID, global logger
nodeOb.cache, logger = logging.getLogger(__name__)
nodeOb.catch, logger.propagate = False
nodeOb.cryptographyInfo, logger.setLevel(logging.DEBUG)
nodeOb.cLog,
) vh = VHandler(logging.DEBUG, tolog)
nodeOb.server = Server(
nodeOb.oTransceiver, formatter = logging.Formatter(
nodeOb.catch, '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
nodeOb.onodeID,
nodeOb.network, vh.setFormatter(formatter)
nodeOb.cLog,
) logger.addHandler(vh)
nodeOb.mTasks["list"] = asyncio.create_task(nodeOb.spongeListen())
await asyncio.sleep(1) dt = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
nodeOb.mTasks["pct"] = asyncio.create_task(nodeOb.oTransceiver.progressCheck())
await asyncio.sleep(1) fh = logging.FileHandler(f"logs/{nodeName}_{dt}.log")
nodeOb.mTasks["mon"] = asyncio.create_task(nodeOb.monitor()) fh.setFormatter(formatter)
await asyncio.sleep(1)
nodeOb.mTasks["announce"] = asyncio.create_task(nodeOb.oTransceiver.announce()) logger.addHandler(fh)
await asyncio.sleep(1)
await nodeOb.server.app.start_server(port=int(sys.argv[2]), debug=True)
except Exception:
logging.log(20, traceback.format_exc())
if __name__ == "__main__": if __name__ == "__main__":
try: try:
mainThread = threading.Thread(target=asyncio.run, args=(main(),)) tolog = queue.Queue()
initLogger(nodeNickname, tolog)
mainThread = threading.Thread(target=uvloop.run, args=(main(),))
mainThread.start() mainThread.start()
lplThread = threading.Thread(target=asyncio.run, args=(logPassLoop(),)) # TODO: Waiting for this to finish when no tui
lplThread.start() # TODO: Logging to screen when no tui
tuiOb = TUI() if showTUI:
tuiOb.nodeOb = nodeOb tlog.runLogUI(tolog, nodeNickname)
tuiOb.run()
except: except:
try: print(traceback.format_exc())
nodeOb.cLog(30, traceback.format_exc()) os._exit(1)
except:
logging.log(30, traceback.format_exc())

View File

@ -1 +1 @@
python run.py /dev/ttyACM1 5000 server2.info 0 falin python run.py -d /dev/ttyACM1 -p 5001 -s 75 -n falin -k jgf765!FS0+6 -o True

1
src/setup.fish Normal file
View File

@ -0,0 +1 @@
set TERM xterm-256color

1
src/setup.sh Normal file
View File

@ -0,0 +1 @@
TERM=xterm-256color

296
src/stale/WhaleSong.dhefern.py Executable file
View File

@ -0,0 +1,296 @@
import base64
import os
from cryptography.fernet import Fernet
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import dh
from cryptography.hazmat.primitives.kdf.hkdf import HKDF
from cryptography.hazmat.primitives.serialization import (
Encoding,
NoEncryption,
ParameterFormat,
PublicFormat,
PrivateFormat,
)
import cryptography.hazmat.primitives.serialization as Serialization
import msgpack
from Daisy.Store import Store
# TODO: Different store directories per node
class DHEFern:
"""
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Cryptography/WhaleSong.py>`__
Attributes
----------
cLog
Method reference to `run.Node.cLog` so we can log to the ui from here
loadedParams: dict
In memory representations of cryptography parameters
loadedKeys: dict
In memory representations of cryptography keys
nodeNickname: str
Name of node for isolating configs when running multiple nodes
cache: Components.daisy.Cache
Daisy cache for use in storing cryptography information
publicKey
Public key for node
privateKey
Private key for node
"""
def __init__(self, cache, nodeNickname, cLog):
"""
Parameters
----------
cache: Components.daisy.Cache
Reference to the node instances Daisy cache
nodeNickname: str
Node nickname for record storage
cLog
Reference to `run.Node.cLog`
"""
self.cLog = cLog
self.stores = {}
self.loadedParams = {}
self.loadedKeys = {}
self.nodeNickname = nodeNickname
self.cache = cache
if os.path.exists("daisy/cryptography/{0}/param".format(nodeNickname)) == False:
self.initStore("param")
else:
self.stores["param"] = Store("param", "cryptography", nodeNickname)
self.params = self.loadParamBytes(self.stores["param"].get()["self"])
self.cLog(20, "Param store initialized")
if os.path.exists("daisy/cryptography/{0}/key".format(nodeNickname)) == False:
self.cLog(20, "Key store DNE, initializing")
self.initStore("key")
self.genKeyPair()
else:
self.cLog(20, "Key store exists, loading")
self.stores["key"] = Store("key", "cryptography", nodeNickname)
self.cLog(20, "Store loaded")
# tks = self.stores["key"].get()
# self.publicKey = tks["self"]["publicKey"]
# self.privateKey = tks["self"]["privateKey"]
self.cLog(20, "Key store initialized")
def checkInMem(self, store: str, nodeID: str):
"""
Check if parameters or keys are loaded for node of nodeID
Parameters
----------
store: str
Whether to check loaded keys or parameters
"""
if store == "param":
return nodeID in self.loadedParams.keys()
elif store == "key":
return nodeID in self.loadedKeys.keys()
def loadRecordToMem(self, store: str, nodeID: str):
"""
Load record of nodeID from store to either keys or parameters
"""
r = self.getRecord(store, nodeID)
if r == False:
self.cLog(
30, "Tried to load nonexistent {0} for node {1}".format(store, nodeID)
)
return False
elif self.checkInMem(store, nodeID):
self.cLog(10, "{0}s already deserialized, skipping".format(store))
else:
if store == "param":
self.loadedParams[nodeID] = self.loadParamBytes(r)
elif store == "key":
self.loadedKeys[nodeID] = {
"publicKey": Serialization.load_pem_public_key(r["publicKey"]),
"privateKey": Serialization.load_pem_private_key(
r["privateKey"], None
),
}
return True
def getRecord(self, store: str, key: str):
"""
Get record from store: store with key: key
"""
r = self.stores[store].getRecord(key)
if r == False:
self.cLog(20, "Record does not exist")
return False
else:
return r
def initStore(self, store: str):
"""
Initialize store: store
"""
self.stores[store] = Store(store, "cryptography", self.nodeNickname)
if store == "param":
self.genParams()
self.stores[store].update("self", self.getParamsBytes(), recur=False)
elif store == "key":
self.stores[store].update("self", {}, recur=False)
else:
self.cLog(30, "Store not defined")
def genParams(self):
"""
Generate Diffie Hellman parameters
"""
params = dh.generate_parameters(generator=2, key_size=2048)
self.params = params
return params
def getParamsBytes(self):
"""
Get bytes encoded from self.parameters (TODO: Encode from store)
"""
return self.params.parameter_bytes(Encoding.PEM, ParameterFormat.PKCS3)
def loadParamBytes(self, pemBytes: bytes):
"""
Load parameters to self.params from given bytes (TODO: Load from store)
"""
self.params = Serialization.load_pem_parameters(pemBytes)
return self.params
def genKeyPair(self, paramsOverride=False, setSelf: bool = True):
"""
Generate public and private keys from self.params (TODO: Gen from passed params)
paramsOverride
False or parameters to use (TODO)
setSelf: bool
Whether to set self.privateKey and self.publicKey
"""
privateKey = self.params.generate_private_key()
if setSelf:
self.privateKey = privateKey
publicKey = privateKey.public_key()
if setSelf:
self.publicKey = publicKey
self.stores["key"].update(
"self",
{
"publicKey": self.publicKey.public_bytes(
Encoding.PEM, PublicFormat.SubjectPublicKeyInfo
),
"privateKey": self.privateKey.private_bytes(
Encoding.PEM, PrivateFormat.PKCS8, NoEncryption()
),
},
)
return [privateKey, publicKey]
else:
publicKey = publicKey.public_bytes(
Encoding.PEM, PublicFormat.SubjectPublicKeyInfo
)
privateKey = privateKey.private_bytes(
Encoding.PEM, PrivateFormat.PKCS8, NoEncryption()
)
return [privateKey, publicKey]
def keyDerive(self, pubKey: bytes, salt: bytes, nodeID: str, params: bytes):
"""
Derive shared key using Diffie Hellman
pubKey: bytes
Public key
nodeID: str
PierMesh node ID
params: bytes
Encryption parameters
"""
if self.checkInMem("param", nodeID) == False:
if self.getRecord("param", nodeID) == False:
self.updateStore("param", nodeID, params, recur=False)
self.loadRecordToMem("param", nodeID)
self.cLog(20, "Precheck done for key derivation")
# TODO: Load them and if private key exists load it, otherwise generate a private key
if self.checkInMem("key", nodeID) == False:
if self.getRecord("key", nodeID) == False:
privateKey, publicKey = self.genKeyPair(setSelf=False)
self.stores["key"].update(
nodeID, {"publicKey": publicKey, "privateKey": privateKey}
)
self.loadRecordToMem("key", nodeID)
sharedKey = self.loadedKeys[nodeID]["privateKey"].exchange(
Serialization.load_pem_public_key(pubKey)
)
# Perform key derivation.
self.cLog(20, "Performing key derivation")
derivedKey = HKDF(
algorithm=hashes.SHA256(), length=32, salt=salt, info=b"handshake data"
).derive(sharedKey)
self.cLog(20, "Derived key")
ederivedKey = base64.urlsafe_b64encode(derivedKey)
tr = self.getRecord("key", nodeID)
tr["derivedKey"] = ederivedKey
self.updateStore("key", nodeID, tr)
self.cLog(20, "Done with cryptography store updates")
return ederivedKey
def getSalt(self):
"""
Get random salt
"""
return os.urandom(16)
# TODO: Build in transport security (node/node)
def encrypt(self, data, nodeID: str, isDict: bool = True):
"""
Do Fernet encryption
data
Either bytes or dict to encrypt
isDict: bool
Whether data is a dictionary
"""
r = self.getRecord("key", nodeID)
if r == False:
self.cLog(20, "Node {0} not in keystore".format(nodeID))
return False
else:
derivedKey = r["derivedKey"]
fernet = Fernet(derivedKey)
if isDict:
data = msgpack.dumps(data)
token = fernet.encrypt(data)
return token
def decrypt(self, data, nodeID: str):
"""
Decrypt bytes and return either str or dict (TODO: Check whether to msgpack load)
"""
r = self.getRecord("key", nodeID)
if r == False:
self.cLog(20, "No record of node " + nodeID)
return False
elif not "derivedKey" in r.keys():
self.cLog(20, "No key derived for node " + nodeID)
return False
else:
fernet = Fernet(self.getRecord("key", nodeID)["derivedKey"])
return msgpack.loads(fernet.decrypt(data))
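The class above couples Diffie-Hellman key agreement with HKDF derivation and Fernet for the symmetric layer. A stand-alone sketch of that same primitive flow (using only the `cryptography` package, without the Daisy stores this class persists to) would be:

```python
import base64
import os
from cryptography.fernet import Fernet
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import dh
from cryptography.hazmat.primitives.kdf.hkdf import HKDF

# Shared DH parameters, as genParams does
params = dh.generate_parameters(generator=2, key_size=2048)
alicePrivate = params.generate_private_key()
bobPrivate = params.generate_private_key()

# Each side exchanges public keys and computes the same shared secret
salt = os.urandom(16)
sharedAlice = alicePrivate.exchange(bobPrivate.public_key())
sharedBob = bobPrivate.exchange(alicePrivate.public_key())

def derive(shared: bytes) -> bytes:
    # HKDF derivation matching keyDerive, base64-encoded for Fernet
    key = HKDF(algorithm=hashes.SHA256(), length=32, salt=salt,
               info=b"handshake data").derive(shared)
    return base64.urlsafe_b64encode(key)

token = Fernet(derive(sharedAlice)).encrypt(b"hello over PierMesh")
assert Fernet(derive(sharedBob)).decrypt(token) == b"hello over PierMesh"
```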

View File

@ -18,7 +18,7 @@ class Router:
self.network = Network() self.network = Network()
self.catch = Catch(walk=True) self.catch = Catch(walk=True)
self.cache = Cache(walk=True) self.cache = Cache(walk=True)
self.cLog(10, "Loading server info") logger.log(10, "Loading server info")
self.serverInfo = self.cache.get(nfpath) self.serverInfo = self.cache.get(nfpath)
if self.serverInfo == False: if self.serverInfo == False:
self.cache.create(nfpath, {"nodeID": random.randrange(0, 1000000)}) self.cache.create(nfpath, {"nodeID": random.randrange(0, 1000000)})

174
src/tlog.py Normal file
View File

@ -0,0 +1,174 @@
import curses
from curses import wrapper
import os
import logging
import queue
import shutil
import time
import psutil
# TODO: Multi node mode
# TODO: Tab per node
class VHandler(logging.Handler):
tolog = queue.Queue()
def __init__(self, level, tolog):
super().__init__(level)
self.tolog = tolog
def emit(self, record):
r = self.format(record)
self.tolog.put([r, record.levelno])
"""
def initLogger(nodeName, tolog):
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
# Is tolog variable creating an error that cant be logged
# and logging is defaulting
# Check if this reference is causing an issue
vh = VHandler(logging.DEBUG, tolog)
formatter = logging.Formatter(
'%(asctime)s - %(name)s - %(levelname)s - %(message)s')
vh.setFormatter(formatter)
logger.addHandler(vh)
dt = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
fh = logging.FileHandler(f"logs/{nodeName}_{dt}.log")
fh.setFormatter(formatter)
logger.addHandler(fh)
return logger
"""
def logUI(stdscr, tolog, nodeNickname):
logcache = []
# shutil.get_terminal_size returns (columns, lines), so unpack width first
width, height = shutil.get_terminal_size((49, 27))
if height < 28 or width < 49:
print("Console too small or couldn't retrieve size, please switch to no-TUI mode, exiting...")
exit()
logger = logging.getLogger("__main__." + __name__)
p = psutil.Process(os.getpid())
curses.start_color()
stdscr.keypad(True)
stdscr.nodelay(True)
curses.init_color(9, 200, 200, 200)
grey = 9
curses.init_pair(1, curses.COLOR_WHITE, curses.COLOR_BLACK)
curses.init_pair(2, curses.COLOR_BLACK, curses.COLOR_GREEN)
curses.init_pair(3, curses.COLOR_WHITE, grey)
curses.init_color(curses.COLOR_WHITE, 1000, 1000, 1000)
curses.init_color(curses.COLOR_YELLOW, 1000, 1000, 0)
curses.init_color(curses.COLOR_GREEN, 0, 1000, 0)
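# The pairs below are keyed by the logging level constants (DEBUG=10, INFO=20, ...)
# so the render loop can pick a color with curses.color_pair(levelno) per record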
curses.init_color(logging.DEBUG, 0, 1000, 0)
curses.init_pair(logging.DEBUG, 0, logging.DEBUG)
curses.init_color(logging.INFO, 1000, 1000, 1000)
curses.init_pair(logging.INFO, 0, logging.INFO)
curses.init_pair(logging.WARNING, 0, curses.COLOR_YELLOW)
curses.init_pair(logging.ERROR, curses.COLOR_WHITE, curses.COLOR_RED)
curses.init_color(logging.CRITICAL, 1000, 0, 0)
curses.init_pair(logging.CRITICAL, curses.COLOR_WHITE, logging.CRITICAL)
icon = []
with open("piermesh-mini.ascii", "r") as f:
icon = f.read().split("\n")
stdscr.bkgd(' ', curses.color_pair(1))
stdscr.clear()
iwin = curses.newwin(13, 50, 25, 0)
iwin.bkgd(" ", curses.color_pair(3))
for it, line in enumerate(icon[:-1]):
iwin.addstr(it+1, 0, " " + line, curses.color_pair(3))
iwin.addstr(10, 2, " Keys: q to abort, ↑ scroll up", curses.color_pair(2))
iwin.addstr(11, 2, " ↓ scroll down ", curses.color_pair(2))
head = curses.newwin(4, 50, 0, 0)
head.bkgd(" ", curses.color_pair(3))
head.addstr(1, 1, "PierMesh TUI", curses.color_pair(3))
head.addstr(2, 1, "Logs:", curses.color_pair(3))
head.border(' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ')
logpad = curses.newpad(1000, 300)
logpad.bkgdset(" ", curses.color_pair(2))
start = 0
gen = 0
lastr = time.time()
while True:
if gen == 0 or ((time.time()-lastr) > 2):
lastr = time.time()
stdscr.addstr(23, 0, " System usage: ", curses.color_pair(3))
mem = round(p.memory_info().rss/(1024*1024), 2)
cpu = p.cpu_percent(interval=0.1)
stdscr.addstr(
24,
0,
f" MEM: {mem} Mb CPU: {cpu}% ",
curses.color_pair(2)
)
if tolog.empty() != True:
while True:
logcache.insert(0, tolog.get())
tolog.task_done()
if tolog.qsize() < 1:
break
if len(logcache) > 100:
logcache = logcache[:100]
logpad.clear()
nextOffset = 0
for it, message in enumerate(logcache):
msg = message[0]
if len(msg) > width:
msgPartA = msg[:width]
msgPartB = msg[width:]
if len(msgPartB) > width:
msgPartB = msgPartB[:width]
logpad.addstr(
it+nextOffset, 0, " " + msgPartA + " ", curses.color_pair(message[1]))
logpad.addstr(
it+nextOffset+1, 0, " " + msgPartB + " ", curses.color_pair(message[1]))
nextOffset = 1
else:
logpad.addstr(
it+nextOffset, 0, " " + msg + " ", curses.color_pair(message[1]))
nextOffset = 0
logpad.refresh(start, 0, 4, 0, 22, width)
stdscr.refresh()
if gen < 1:
iwin.refresh()
head.refresh()
gen += 1
ch = stdscr.getch()
if ch == ord("q"):
curses.nocbreak()
stdscr.keypad(False)
curses.echo()
curses.endwin()
os._exit(1)
elif ch == curses.KEY_UP:
if start != 0:
start -= 1
elif ch == curses.KEY_DOWN:
if start < tolog.qsize():
start += 1
def runLogUI(tolog, nodeNickname):
wrapper(logUI, tolog, nodeNickname)
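A minimal way to exercise this module on its own (a hypothetical demo appended to the bottom of tlog.py, not part of the commit; it assumes `piermesh-mini.ascii` is present in the working directory since `logUI` reads it) could be:

```python
if __name__ == "__main__":
    import threading

    tolog = queue.Queue()
    log = logging.getLogger("demo")
    log.setLevel(logging.DEBUG)
    handler = VHandler(logging.DEBUG, tolog)
    handler.setFormatter(logging.Formatter("%(asctime)s - %(levelname)s - %(message)s"))
    log.addHandler(handler)

    # Feed the queue from another thread while the curses UI drains it
    def chatter():
        for i in range(100):
            log.info("demo message %s", i)
            time.sleep(1)

    threading.Thread(target=chatter, daemon=True).start()
    runLogUI(tolog, "node00")
```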

0
src/tui.py Normal file
View File

View File

@ -1,5 +1,6 @@
# TODO: DEPRECATE
from textual.app import App, ComposeResult from textual.app import App, ComposeResult
from textual.widgets import Log, Label, Footer, Header, ProgressBar from textual.widgets import Log, Label, Footer, Header, ProgressBar, Input, Button
from textual.binding import Binding from textual.binding import Binding
from textual.containers import Horizontal, Vertical from textual.containers import Horizontal, Vertical
import sys, os import sys, os
@ -24,6 +25,7 @@ class TUI(App):
Whether the TUI has been killed Whether the TUI has been killed
""" """
todo = []
visibleLogo = True visibleLogo = True
nodeOb = None nodeOb = None
done = False done = False
@ -70,7 +72,15 @@ class TUI(App):
Load the ascii art for display on the left label Load the ascii art for display on the left label
""" """
yield Header(icon="P") yield Header(icon="P")
yield Label(ascii, classes="largeLabel", name="logo", id="logo") yield Vertical(
Label(ascii, classes="largeLabel", name="logo", id="logo"),
Label("Add/set pre shared key for node\n"),
Label("Node ID:"),
Input(placeholder="000000", type="integer", max_length=6, name="pskNodeID", id="pskNodeID"),
Label("PSK:"),
Input(type="text", max_length=6, name="psk", id="psk"),
Button("Add/set PSK", name="addPSK", id="addPSK"),
)
yield Vertical( yield Vertical(
Log(auto_scroll=True, classes="baseLog"), Log(auto_scroll=True, classes="baseLog"),
Label("CPU usage:", name="cpul", id="cpul"), Label("CPU usage:", name="cpul", id="cpul"),
@ -79,6 +89,16 @@ class TUI(App):
) )
yield Footer() yield Footer()
def on_button_pressed(self, event: Button.Pressed) -> None:
if event.button.id == "addPSK":
self.todo.append({
"action": "addPSK",
"data": {
"nodeID": self.query_one("#pskNodeID").value.zpad(6),
"PSK": self.query_one("#PSK").value
}
})
def do_write_line(self, logLine: str): def do_write_line(self, logLine: str):
""" """
Write line to the logs panel Write line to the logs panel
@ -127,3 +147,4 @@ class TUI(App):
if __name__ == "__main__": if __name__ == "__main__":
app = TUI() app = TUI()
app.run() app.run()