Compare commits


1 Commit
main ... dev

Author SHA1 Message Date
Agie Ashwood 66f3cbe401 Prototype fast forward 2024-11-22 20:34:39 -07:00
85 changed files with 3134 additions and 780 deletions

12
.gitignore vendored
View File

@ -8,3 +8,15 @@ src/Splash/res/js/node_modules/
src/daisy/
src/catch/
src/logs/
zims/
project.matrix
piermesh.nvim
piermesh
node00/*
node00/
*.index
*.log
*.vim
*.workspace
.workspace
.workspace.backup

View File

@ -1,3 +1,5 @@
home = /usr/bin
include-system-site-packages = false
version = 3.10.12
version = 3.11.2
executable = /usr/bin/python3.11
command = /usr/bin/python3 -m venv /home/ag/Documents/piermesh

View File

@ -42,5 +42,5 @@ This program is free software: you can redistribute it and/or modify it under th
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with this program. If not, see [https://www.gnu.org/licenses/](https://www.gnu.org/licenses/).
You should have received a copy of the GNU General Public License along with this program. If not, see https://www.gnu.org/licenses/.
```

View File

@ -13,3 +13,7 @@ sphinx
textual
textual-dev
sphinx-markdown-builder==0.6.6
pycryptodome
pyjwt
uvloop
python-lsp-server[all]

14
src/.piermesh Normal file
View File

@ -0,0 +1,14 @@
# DON'T TOUCH THIS SECTION UNLESS YOU KNOW WHAT YOU'RE DOING
[DEFAULT]
Nickname = node00
StartupDelay = 0
WebUIPort = 5000
ShowTUI = True
[OPERATOR_REQUIRED]
TransceiverPort = /dev/ttyACM0
PSK = jgf765!FS0+6
# DO YOUR NON REQUIRED SETTINGS HERE
[OPERATOR_OVERRIDES]
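
The file above is a standard INI-style config, so it can be read with Python's built-in configparser; whether PierMesh itself parses it this way is an assumption. A minimal, illustrative sketch (not code from this commit):

```python
import configparser

# Illustrative reader for the INI-style .piermesh config shown above
config = configparser.ConfigParser()
config.read("src/.piermesh")

nickname = config["DEFAULT"]["Nickname"]             # "node00"
webui_port = config["DEFAULT"].getint("WebUIPort")   # 5000
show_tui = config["DEFAULT"].getboolean("ShowTUI")   # True
transceiver_port = config["OPERATOR_REQUIRED"]["TransceiverPort"]
```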

13
src/.piermesh.example Normal file
View File

@ -0,0 +1,13 @@
# DON'T TOUCH THIS SECTION UNLESS YOU KNOW WHAT YOU'RE DOING
[DEFAULT]
Nickname = node00
StartupDelay = 0
WebUIPort = 5000
ShowTUI = True
[OPERATOR_REQUIRED]
# TransceiverPort = /dev/ttyACM0
# DO YOUR SETTINGS HERE
[OPERATOR_OVERRIDES]

0
src/Actions/Actions.py Normal file
View File

0
src/Actions/__init__.py Normal file
View File

View File

@ -1,10 +1,39 @@
from bs4 import BeautifulSoup
import requests
import msgpack
import lzma
from Packets.Message import Message
import base64
import mimetypes
import logging
from Packets.Messages.Protocols.hopper.Response import HopperResponse
logger = logging.getLogger("__main__." + __name__)
def downloadFile(url, text=True, mimeType=None):
fbytes = b""
with requests.get(url, stream=True) as r:
r.raise_for_status()
for chunk in r.iter_content(chunk_size=8192):
fbytes += chunk
if text:
return fbytes.decode("utf-8")
else:
if mimeType == None:
mimeType, encoding = mimetypes.guess_type(url)
if mimeType == None:
raise ValueError(
"Couldn't guess MIME type and none was supplied, can't encode to data URL"
)
b64str = base64.b64encode(fbytes).decode("utf-8")
dataUrl = "data:{0};base64,{1}".format(mimeType, b64str)
return dataUrl
def get(url: str, params=None):
def get(url: str, params=None, followTags=None):
"""
http/s get request
@ -14,10 +43,44 @@ def get(url: str, params=None):
params
Requests (library) parameters
followTags
None or list of tags to download the src/href from
"""
logger.debug("Hopping to it")
# TODO: Non blocking requests
# WARN: Do not run self requests until this is fixed
r = requests.get(url, params=params)
r = {"response": r.text, "code": r.status_code}
return Message(lzma.compress(msgpack.dumps(r))).get()
logger.debug("Content retrieved, parsing")
r = {
"response": r.text,
"code": r.status_code,
"content-type": r.headers.get("content-type"),
}
logger.debug("Done parsing")
# TODO: Reject followTags if content type is other than html
if followTags != None:
soup = BeautifulSoup(r["response"], "html.parser")
# TODO: Checking for relative links
for tag in followTags:
if tag in ["img", "video"]:
for elem in soup.find_all(tag):
elem["src"] = downloadFile(elem["src"], text=False)
elif tag in ["link"]:
for elem in soup.find_all(tag):
if "stylesheet" in elem.get("rel", []):
style = downloadFile(elem["href"])
styleTag = soup.new_tag("style")
styleTag.string = style
elem.decompose()
soup.head.append(styleTag)
elif tag == "script":
for elem in soup.find_all(tag):
script = downloadFile(elem["src"])
elem["src"] = ""
elem.string = script
r["response"] = str(soup)
logger.debug("Done hopping")
return r
def post(url: str, params=None):
@ -33,4 +96,4 @@ def post(url: str, params=None):
"""
r = requests.post(url, data=params)
r = {"response": r.text, "code": r.status_code}
return Message(lzma.compress(msgpack.dumps(r))).get()
return r
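
A short usage sketch of the reworked get above (the URL is purely illustrative): with followTags set, referenced images and videos are fetched and rewritten to data URLs, stylesheets are inlined into style tags, and scripts are embedded inline before the response is returned.

```python
# Hypothetical call against the updated hopper API
result = get("https://example.com/page", followTags=["img", "script"])

print(result["code"])            # HTTP status code
print(result["content-type"])    # e.g. "text/html"
page = result["response"]        # response body after the followed tags are inlined
```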

7
src/Config/Args.py Normal file
View File

@ -0,0 +1,7 @@
def byFile():
pass
def byPrompt():
pass

22
src/Config/Context.py Normal file
View File

@ -0,0 +1,22 @@
class Context:
def __init__(self, subsets: dict={}, **kwargs):
# Subsets should be a dict of list of value keys
self.ctx = {}
self.subsets = subsets
for key, val in kwargs.items():
self.ctx[key] = {
"val": val,
"type": type(val)
}
def __getitem__(self, idx):
return self.ctx[idx]
def getSubset(self, subset):
if subset in self.subsets.keys():
res = {}
for key in self.subsets[subset]:
res[key] = self.ctx[key]["val"]
return res
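
A brief usage sketch for the new Context class (field names and values are illustrative): each value is stored alongside its type, and getSubset returns a named group of values as a plain dict.

```python
# Hypothetical Context usage
ctx = Context(
    subsets={"routing": ["sourceNode", "packetCount"]},
    sourceNode="000001",
    packetCount=4,
    payload=b"...",
)

print(ctx["packetCount"]["val"])   # 4
print(ctx["packetCount"]["type"])  # <class 'int'>
print(ctx.getSubset("routing"))    # {'sourceNode': '000001', 'packetCount': 4}
```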

0
src/Config/__init__.py Normal file
View File

View File

@ -1,24 +1,26 @@
import base64
import os
from cryptography.fernet import Fernet
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import dh
from cryptography.hazmat.primitives.kdf.hkdf import HKDF
from cryptography.hazmat.primitives.serialization import (
Encoding,
NoEncryption,
ParameterFormat,
PublicFormat,
PrivateFormat,
)
import cryptography.hazmat.primitives.serialization as Serialization
import lzma
import logging
import msgpack
from Crypto.PublicKey import ECC
from Crypto.Hash import SHAKE128
from Crypto.Protocol.DH import key_agreement
from Crypto.Cipher import AES
from Daisy.Store import Store
logger = logging.getLogger("__main__." + __name__)
# TODO: Different store directories per node
# TODO: First time psk transport initiation
# Add this credential manually, it's picked up and used when the two nodes try to communicate before the session is encrypted
class DHEFern:
class Transport:
"""
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Cryptography/WhaleSong.py>`__
@ -47,7 +49,7 @@ class DHEFern:
Private key for node
"""
def __init__(self, cache, nodeNickname, cLog):
def __init__(self, cache, nodeNickname, daisyCryptography, psk):
"""
Parameters
----------
@ -61,32 +63,41 @@ class DHEFern:
Reference to `run.Node.cLog`
"""
self.cLog = cLog
self.stores = {}
self.loadedParams = {}
self.loadedKeys = {}
self.nodeNickname = nodeNickname
self.cache = cache
if os.path.exists("daisy/cryptography/{0}/param".format(nodeNickname)) == False:
self.initStore("param")
else:
self.stores["param"] = Store("param", "cryptography", nodeNickname)
self.params = self.loadParamBytes(self.stores["param"].get()["self"])
self.cLog(20, "Param store initialized")
self.daisyCryptography = daisyCryptography
if os.path.exists("daisy/cryptography/{0}/key".format(nodeNickname)) == False:
self.cLog(20, "Key store DNE, initializing")
logger.log(20, "Key store DNE, initializing")
self.initStore("key")
self.genKeyPair()
else:
self.cLog(20, "Key store exists, loading")
self.stores["key"] = Store("key", "cryptography", nodeNickname)
self.cLog(20, "Store loaded")
# tks = self.stores["key"].get()
# self.publicKey = tks["self"]["publicKey"]
# self.privateKey = tks["self"]["privateKey"]
self.cLog(20, "Key store initialized")
logger.log(20, "Key store exists, loading")
self.stores["key"] = Store(
"key", "cryptography", nodeNickname, daisyCryptography
)
logger.log(20, "Store loaded")
logger.log(20, "Key store initialized")
srecord = self.getRecord("key", "self")
if srecord == False:
self.stores["key"].createEmpty("self")
self.stores["key"].update(
"self",
{"PSK": self.daisyCryptography.pad(psk).encode("utf-8")},
write=False
)
if "publicKey" not in self.getRecord("key", "self").keys():
self.addPublickey(None, None, forSelf=True)
else:
self.stores["key"].update("self", {
"publicKey": ECC.import_key(
self.getRecord("key", "self")["publicKey"]
)
}, write=False)
def checkInMem(self, store: str, nodeID: str):
def kdf(self, bytesX):
return SHAKE128.new(bytesX).read(32)
def checkInMem(self, store: str, nodeID: str, checkFieldsExist=[]):
"""
Check if parameters or keys are loaded for node of nodeID
@ -99,7 +110,14 @@ class DHEFern:
if store == "param":
return nodeID in self.loadedParams.keys()
elif store == "key":
return nodeID in self.loadedKeys.keys()
record = self.getRecord("key", nodeID)
if record != False:
for field in checkFieldsExist:
if not (field in record.keys()):
if field == "staticKey":
self.genStaticKey(nodeID)
elif field == "ourEphemeralKey":
self.genOurEphemeralKey(nodeID)
def loadRecordToMem(self, store: str, nodeID: str):
"""
@ -107,15 +125,16 @@ class DHEFern:
"""
r = self.getRecord(store, nodeID)
if r == False:
self.cLog(
logger.log(
30, "Tried to load nonexistent {0} for node {1}".format(store, nodeID)
)
return False
elif self.checkInMem(store, nodeID):
self.cLog(10, "{0}s already deserialized, skipping".format(store))
logger.log(10, "{0}s already deserialized, skipping".format(store))
else:
if store == "param":
self.loadedParams[nodeID] = self.loadParamBytes(r)
"""
elif store == "key":
self.loadedKeys[nodeID] = {
"publicKey": Serialization.load_pem_public_key(r["publicKey"]),
@ -123,54 +142,37 @@ class DHEFern:
r["privateKey"], None
),
}
"""
return True
def getRecord(self, store: str, key: str):
def getRecord(self, store: str, key: str, ephemeral=False):
"""
Get record from store: store with key: key
"""
r = stores[store].getRecord(key)
r = self.stores[store].getRecord(key, ephemeral=ephemeral)
if r == False:
self.cLog(20, "Record does not exist")
logger.log(20, "Record does not exist")
return False
else:
return r
# TODO: Fix stores, URGENT
def initStore(self, store: str):
"""
Initialize store: store
"""
self.stores[store] = Store(store, "cryptography", self.nodeNickname)
self.stores[store] = Store(
store, "cryptography", self.nodeNickname, self.daisyCryptography
)
if store == "param":
self.genParams()
self.stores[store].update("self", self.getParamsBytes(), recur=False)
elif store == "key":
self.stores[store].update("self", {}, recur=False)
else:
self.cLog(30, "Store not defined")
logger.log(30, "Store not defined")
def genParams(self):
"""
Generate Diffie Hellman parameters
"""
params = dh.generate_parameters(generator=2, key_size=2048)
self.params = params
return params
def getParamsBytes(self):
"""
Get bytes encoded from self.parameters (TODO: Encode from store)
"""
return self.params.parameter_bytes(Encoding.PEM, ParameterFormat.PKCS3)
def loadParamBytes(self, pemBytes: bytes):
"""
Load parameters to self.params from given bytes (TODO: Load from store)
"""
self.params = Serialization.load_pem_parameters(pemBytes)
return self.params
def genKeyPair(self, paramsOverride=False, setSelf: bool = True):
def genStaticKey(self, onodeID, paramsOverride=False):
"""
Generate public and private keys from self.params (TODO: Gen from passed params)
@ -180,85 +182,99 @@ class DHEFern:
setSelf: bool
Whether to set self.privateKey and self.publicKey
"""
privateKey = self.params.generate_private_key()
if setSelf:
self.privateKey = privateKey
publicKey = privateKey.public_key()
if setSelf:
self.publicKey = publicKey
staticKey = ECC.generate(curve="p256")
self.stores["key"].update(
"self",
onodeID,
{
"publicKey": self.publicKey.public_bytes(
Encoding.PEM, PublicFormat.SubjectPublicKeyInfo
),
"privateKey": self.privateKey.private_bytes(
Encoding.PEM, PrivateFormat.PKCS8, NoEncryption()
),
"staticKey": staticKey.export_key(
format="PEM", prot_params={"iteration_count": 131072}
)
},
)
return [privateKey, publicKey]
self.stores["key"].update(onodeID, {"staticKey": staticKey}, write=False)
def genOurEphemeralKey(self, onodeID):
ourEphemeralKey = ECC.generate(curve="p256")
self.stores["key"].update(onodeID, {"ourEphemeralKey": ourEphemeralKey}, write=False)
def addPublickey(self, onodeID, publicKey, forSelf=False):
if forSelf:
publicKey = ECC.generate(curve="p256")
self.stores["key"].update("self", {
"publicKey": publicKey.export_key(
format="PEM",
prot_params={"iteration_count": 131072}
)})
self.stores["key"].update("self", {
"publicKey": publicKey
},
write=False)
else:
publicKey = publicKey.public_bytes(
Encoding.PEM, PublicFormat.SubjectPublicKeyInfo
# TODO: Fix stores
# self.stores["key"].update(onodeID, {"publicKey": publicKey})
logger.log(20, "Importing keys")
record = self.getRecord("key", onodeID)
if record == False:
self.stores["key"].createEmpty(onodeID)
self.stores["key"].update(onodeID, {"publicKey": publicKey})
self.stores["key"].update(onodeID, {"publicKey": ECC.import_key(publicKey)}, write=False)
def addPeerEphemeralKey(self, onodeID, peerEphemeralKey):
self.stores["key"].update(onodeID, {"peerEphemeralKey": ECC.import_key(peerEphemeralKey)}, write=False)
def sessionSetup(self, onodeID, peerEphemeralKey):
# TODO: Deeper checking before loading
# TODO: Loading existing records
if self.getRecord("key", onodeID) == False:
logger.log(30, "No record, waiting for announce")
else:
self.addPeerEphemeralKey(onodeID, peerEphemeralKey)
self.generateSessionKey(onodeID)
def generateSessionKey(self, onodeID):
# TODO: Gen static key if not exists
# TODO: Gen our ephemeral key if not exists
keysOb = self.getRecord("key", onodeID, ephemeral=True)
if ("publicKey" not in keysOb) or ("staticKey" not in keysOb):
dkeysOb = self.getRecord("key", onodeID)
if ("publicKey" not in keysOb):
self.stores["key"].update(
onodeID, {
"publicKey": ECC.import_key(
dkeysOb["publicKey"]
)
privateKey = privateKey.private_bytes(
Encoding.PEM, PrivateFormat.PKCS8, NoEncryption()
}, write=False
)
return [privateKey, publicKey]
def keyDerive(self, pubKey: bytes, salt: bytes, nodeID: str, params: bytes):
"""
Derive shared key using Diffie Hellman
pubKey: bytes
Public key
nodeID: str
PierMesh node ID
params: bytes
Encryption parameters
"""
if self.checkInMem("param", nodeID) == False:
if self.getRecord("param", nodeID) == False:
self.updateStore("param", nodeID, params, recur=False)
self.loadRecordToMem("param", nodeID)
self.cLog(20, "Precheck done for key derivation")
# TODO: Load them and if private key exists load it, otherwise generate a private key
if self.checkInMem("key", nodeID) == False:
if self.getRecord("key", nodeID) == False:
privateKey, publicKey = self.genKeyPair(setSelf=False)
self.updateStore(
"key", nodeID, {"publicKey": publicKey, "privateKey": privateKey}
if ("staticKey" not in keysOb):
self.stores["key"].update(
onodeID, {
"staticKey": ECC.import_key(
dkeysOb["staticKey"]
)
self.loadRecordToMem("key", nodeID)
sharedKey = self.loadedKeys[nodeID]["privateKey"].exchange(
Serialization.load_pem_public_key(pubKey)
}, write=False
)
# Perform key derivation.
self.cLog(20, "Performing key derivation")
derivedKey = HKDF(
algorithm=hashes.SHA256(), length=32, salt=salt, info=b"handshake data"
).derive(sharedKey)
self.cLog(20, "Derived key")
ederivedKey = base64.urlsafe_b64encode(derivedKey)
tr = self.getRecord("key", nodeID)
tr["derivedKey"] = ederivedKey
self.updateStore("key", nodeID, tr)
self.cLog(20, "Done with cryptography store updates")
return ederivedKey
def getSalt(self):
"""
Get random salt
"""
return os.urandom(16)
keysOb = self.getRecord("key", onodeID, ephemeral=True)
reget = False
if "staticKey" not in keysOb:
self.genStaticKey(onodeID)
reget = True
if "ourEphemeralKey" not in keysOb:
self.genOurEphemeralKey(onodeID)
reget = True
if reget:
keysOb = self.getRecord("key", onodeID, ephemeral=True)
sessionKey = key_agreement(
static_priv=keysOb["staticKey"],
static_pub=keysOb["publicKey"],
eph_priv=keysOb["ourEphemeralKey"],
eph_pub=keysOb["peerEphemeralKey"],
kdf=self.kdf,
)
self.stores["key"].update(onodeID, {"sessionKey": sessionKey}, write=False)
return sessionKey
# TODO: Build in transport security (node/node)
def encrypt(self, data, nodeID: str, isDict: bool = True):
def encrypt(self, data, nodeID: str, isDict: bool = True, pskEncrypt=False):
"""
Do Fernet encryption
@ -268,29 +284,73 @@ class DHEFern:
isDict: bool
Whether data is a dictionary
"""
r = self.getRecord("key", nodeID)
if r == False:
self.cLog(20, "Node {0} not in keystore".format(nodeID))
return False
else:
derivedKey = r["derivedKey"]
fernet = Fernet(derivedKey)
if nodeID == "-00001" or pskEncrypt:
cipher = AES.new(self.getRecord("key", "self", ephemeral=True)["PSK"], AES.MODE_GCM)
nonce = cipher.nonce
if isDict:
data = msgpack.dumps(data)
token = fernet.encrypt(data)
return token
ciphertext, tag = cipher.encrypt_and_digest(data)
return (ciphertext, nonce, tag)
elif (self.getRecord("key", nodeID)) == False:
logger.log(30, "Node {0} not in keychain".format(nodeID))
return False
else:
r = self.getRecord("key", nodeID, ephemeral=True)
if r == False:
r = self.getRecord("key", "self", ephemeral=True)
logger.info(r)
if "sessionKey" in r.keys():
sessionKey = r["sessionKey"]
cipher = AES.new(sessionKey, AES.MODE_GCM)
nonce = cipher.nonce
if isDict:
data = msgpack.dumps(data)
ciphertext, tag = cipher.encrypt_and_digest(data)
return (ciphertext, nonce, tag)
elif "PSK" in r.keys():
cipher = AES.new(r["PSK"], AES.MODE_GCM)
nonce = cipher.nonce
if isDict:
data = msgpack.dumps(data)
ciphertext, tag = cipher.encrypt_and_digest(data)
return (ciphertext, nonce, tag)
else:
logger.log(20, "Node {0} does not have session key".format(nodeID))
def decrypt(self, data, nodeID: str):
def decrypt(self, data, nodeID: str, nonce, tag):
"""
Decrypt bytes and return either str or dict (TODO: Check whether to msgpack load)
"""
r = self.getRecord("key", nodeID)
if r == False:
self.cLog(20, "No record of node " + nodeID)
return False
elif not "derivedKey" in r.keys():
self.cLog(20, "No key derived for node " + nodeID)
return False
# TODO: Handling existing record
record = self.getRecord("key", nodeID, ephemeral=True)
if (record == False) or ("sessionKey" not in record.keys()):
cipher = AES.new(self.getRecord("key", "self", ephemeral=True)["PSK"], AES.MODE_GCM, nonce=nonce)
data = cipher.decrypt(data)
logger.log(10, data)
#data = msgpack.loads(data)
data = msgpack.loads(lzma.decompress(data))
logger.log(10, "Decrypt/deserialize output")
logger.log(10, data)
return data
# logger.log(20, "Node {0} not in keychain".format(nodeID))
# return False
else:
fernet = Fernet(self.getRecord("key", nodeID)["derivedKey"])
return msgpack.loads(fernet.decrypt(data))
if "sessionKey" in record.keys():
sessionKey = record["sessionKey"]
cipher = AES.new(sessionKey, AES.MODE_GCM, nonce=nonce)
data = cipher.decrypt(data)
data = msgpack.loads(lzma.decompress(data))
return data
elif "PSK" in record.keys():
cipher = AES.new(record["PSK"], AES.MODE_GCM, nonce=nonce)
data = cipher.decrypt(data)
data = msgpack.loads(lzma.decompress(data))
return data
else:
logger.log(20, "Node {0} does not have session key".format(nodeID))
return False
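
To summarize the new flow above: each node pair combines a static P-256 key and per-session ephemeral keys through pycryptodome's key_agreement, the shared secret is run through the SHAKE128 kdf, and the result is used as an AES-GCM session key, with the PSK covering traffic before a session exists. A standalone sketch of that derivation with illustrative key names, showing one side's computation:

```python
from Crypto.PublicKey import ECC
from Crypto.Protocol.DH import key_agreement
from Crypto.Hash import SHAKE128
from Crypto.Cipher import AES

def kdf(sharedBytes):
    # Same construction as Transport.kdf: 32-byte SHAKE128 digest
    return SHAKE128.new(sharedBytes).read(32)

# Illustrative long-term (static) and per-session (ephemeral) keys for two nodes
ourStatic, ourEphemeral = ECC.generate(curve="p256"), ECC.generate(curve="p256")
peerStatic, peerEphemeral = ECC.generate(curve="p256"), ECC.generate(curve="p256")

sessionKey = key_agreement(
    static_priv=ourStatic,
    static_pub=peerStatic.public_key(),
    eph_priv=ourEphemeral,
    eph_pub=peerEphemeral.public_key(),
    kdf=kdf,
)

# Transport-style AES-GCM encryption with the derived session key
cipher = AES.new(sessionKey, AES.MODE_GCM)
ciphertext, tag = cipher.encrypt_and_digest(b"hello piermesh")
```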

View File

@ -18,6 +18,7 @@ class Cache:
def __init__(
self,
daisyCryptography,
filepaths=None,
cacheFile=None,
path: str = "daisy",
@ -42,27 +43,31 @@ class Cache:
isCatch: bool
Whether this cache is for catchs
"""
self.daisyCryptography = daisyCryptography
self.data = {}
self.path = path
if not os.path.exists(self.path):
os.makedirs(self.path)
if filepaths != None:
for fp in filepaths:
fp = path + "/" + fp
if os.path.isfile(fp):
self.data[fp] = Daisy(fp)
self.data[fp] = Daisy(fp, daisyCryptography)
elif cacheFile != None:
with open(cacheFile, "r") as f:
for fp in f.read().split("\n"):
self.data[fp] = Daisy(fp)
self.data[fp] = Daisy(fp, daisyCryptography)
elif walk:
for root, dirs, files in os.walk(self.path):
for p in dirs + files:
if not (".json" in p):
if not (".md" in p):
tpath = root + "/" + p
self.data[tpath] = Daisy(tpath)
self.data[tpath] = Daisy(tpath, daisyCryptography)
def create(self, path: str, data: dict):
def create(self, path: str, data: dict, remote=False):
"""
Create new record
@ -74,12 +79,16 @@ class Cache:
data: dict
Data to populate record with
"""
if remote == False:
with open(self.path + "/" + path, "wb") as f:
f.write(msgpack.dumps(data))
# logging.log(10, "Done creating record")
self.data[path] = Daisy(self.path + "/" + path)
self.data[path] = Daisy(self.path + "/" + path, self.daisyCryptography)
# logging.log(10, "Done loading to Daisy")
return self.data[path]
else:
self.data[path] = Ref(path, remote)
return self.data[path]
def get(self, path: str):
"""
@ -92,7 +101,7 @@ class Cache:
return self.data[path]
else:
if os.path.exists(self.path + "/" + path):
self.data[path] = Daisy(self.path + "/" + path)
self.data[path] = Daisy(self.path + "/" + path, self.daisyCryptography)
return self.data[path]
else:
# logging.log(10, "File does not exist")

View File

@ -1,7 +1,9 @@
from Daisy.Cache import Cache
from Daisy.Ref import Ref
import os
import random
import uuid
class Catch(Cache):
@ -16,13 +18,23 @@ class Catch(Cache):
catches = {}
def __init__(
self, path: str = "catch", filepaths=None, catchFile=None, walk: bool = False
self,
daisyCryptography,
path: str = "catch",
filepaths=None,
catchFile=None,
walk: bool = False,
):
"""
Basically the same initialization parameters as Cache
"""
super().__init__(
filepaths=filepaths, cacheFile=catchFile, path=path, walk=walk, isCatch=True
daisyCryptography,
filepaths=filepaths,
cacheFile=catchFile,
path=path,
walk=walk,
isCatch=True,
)
# TODO: Fins
@ -34,7 +46,7 @@ class Catch(Cache):
return super().get(path)
# TODO: Rename
def get(self, head: str, tail: str, fins=None):
def get(self, head: str, body: str, fins=None):
"""
Get catch by pieces
@ -49,21 +61,41 @@ class Catch(Cache):
fins
List of (maximum 8 characters) strings at the end of the catch, or None if none
"""
r = self.search({"head": head, "tail": tail})
r = ""
if fins != None and fins != "":
r = self.search({"head": head, "body": body, "fins": fins})
else:
r = self.search({"head": head, "body": body})
if len(r) < 1:
return False
else:
return r[0][1]["html"]
def addc(self, peer, node, seperator, head, tail, data, fins=None):
tnpath = "catch/" + node
if os.path.exists(tnpath) != True:
os.makedirs(tnpath)
def addc(self, peer, node, seperator, head, body, data, fins=None, remote=False):
tnpath = f"catch/{node}"
if os.path.exists(self.path + "/" + tnpath) != True:
os.makedirs(self.path + "/" + tnpath)
tppath = tnpath + "/" + peer
if os.path.exists(tppath) != True:
os.makedirs(tppath)
if os.path.exists(self.path + "/" + tppath) != True:
os.makedirs(self.path + "/" + tppath)
sid = str(random.randrange(0, 999999)).zfill(6)
data["seperator"] = seperator
data["head"] = head
data["tail"] = tail
data["body"] = body
if fins != None:
data["fins"] = fins
res = self.create("{0}/{1}/{2}".format(node, peer, sid), data)
res = self.create("{0}/{1}".format(tppath, sid), data, remote=remote)
return [sid, res]
def genIndex(self, onodeID):
dirList = []
for k, v in self.data.items():
curCatch = {"remoteNode": onodeID}
if type(v.msg) != str:
curCatch = curCatch | v.msg
del curCatch["html"]
dirList.append(curCatch)
return dirList
def mergeIndex(self, remoteIndex):
self.remoteCatchesMap += remoteIndex
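
A hypothetical publish-then-fetch round trip against the updated Catch API (daisyCryptography and the content values are placeholders): addc stores a record under catch/<node>/<peer>/<id> with separator, head, body and optional fins, and get searches those fields and returns the stored html.

```python
# Illustrative usage; daisyCryptography stands in for the Daisy crypto helper
catch = Catch(daisyCryptography, walk=True)

sid, record = catch.addc(
    "000001",                 # peer
    "node00",                 # node
    "!",                      # separator
    "PIER",                   # head (max. 4 characters)
    "mesh",                   # body (max. 16 characters)
    {"html": "<h1>hi</h1>"},  # record data
)

html = catch.get("PIER", "mesh")   # "<h1>hi</h1>", or False if nothing matches
```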

10
src/Daisy/Credential.py Normal file
View File

@ -0,0 +1,10 @@
from Daisy.Daisy import Daisy
class Credential(Daisy):
def __init__(self, nodeNickname, credentialName, extension, daisyCryptography):
fname = "data/{0}/{1}.{2}".format(nodeNickname, credentialName, extension)
super().__init__(
fname,
daisyCryptography,
)

View File

@ -0,0 +1,46 @@
from Crypto.Cipher import AES
import traceback
import logging
logger = logging.getLogger("__main__." + __name__)
class SteelPetal:
def __init__(self, key, nonce=None, testData=None):
try:
if nonce == None:
self.cipher = AES.new(self.pad(key).encode("utf-8"), AES.MODE_GCM)
self.nonce = self.cipher.nonce
else:
self.cipher = AES.new(
self.pad(key).encode("utf-8"), AES.MODE_GCM, nonce=nonce
)
self.nonce = nonce
if testData != None:
try:
self.cipher.decrypt(testData)
except:
logger.log(30, traceback.format_exc())
return False
except:
logger.log(30, traceback.format_exc())
def pad(self, key):
BS = AES.block_size
key = key + (BS - len(key) % BS) * chr(BS - len(key) % BS)
return key
def encrypt(self, data):
try:
return self.cipher.encrypt_and_digest(data)
except:
logger.log(20, traceback.format_exc())
return False
def decrypt(self, data):
try:
return self.cipher.decrypt(data)
except:
logger.log(20, traceback.format_exc())
return False
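
SteelPetal above pads a text PSK out to the AES block size and wraps it in an AES-GCM cipher. A minimal round-trip sketch (key and plaintext are illustrative; the receiving side must be constructed with the sender's nonce):

```python
# Hypothetical round trip; the key is padded to a valid AES key length internally
sender = SteelPetal("jgf765!FS0+6")
ciphertext, tag = sender.encrypt(b"status: ok")

receiver = SteelPetal("jgf765!FS0+6", nonce=sender.nonce)
plaintext = receiver.decrypt(ciphertext)   # b"status: ok"
```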

View File

@ -1,12 +1,14 @@
import os
import json
import msgpack
import logging
# TODO: delete
# TODO: propagate json changes to msgpack automatically
# TODO: propagate msgpack changes to cache automatically
# TODO: Indexing
logger = logging.getLogger("__main__." + __name__)
def _json_to_msg(path: str):
"""
@ -40,12 +42,15 @@ class Daisy:
In memory representation
"""
# TODO: Strong encrypt
def __init__(
self,
filepath: str,
daisyCryptography,
templates: dict = {},
template: bool = False,
prefillDict: bool = False,
remote=False,
):
"""
Parameters
@ -62,7 +67,12 @@ class Daisy:
prefillDict: bool
Whether to fill the record with a template
"""
self.remote = False
self.filepath = filepath
if remote != False:
self.remote = True
self.remoteNodeID = remote
else:
if os.path.exists(filepath) != True:
with open(filepath, "wb") as f:
if template != False:
@ -74,10 +84,14 @@ class Daisy:
f.write(msgpack.dumps(t))
self.msg = t
else:
print("No such template as: " + template)
logger.log(20, "No such template as: " + template)
else:
f.write(msgpack.dumps({}))
self.msg = {}
t = {}
if prefillDict != False:
for k in prefillDict.keys():
t[k] = prefillDict[k]
f.write(msgpack.dumps(t))
self.msg = t
elif os.path.isdir(filepath):
self.msg = "directory"
else:
@ -164,26 +178,18 @@ class Daisy:
else:
return None
def loadTemplates(templatePath: str = "templates"):
"""Load templates for prefilling records
def json_to_msg(self, path: str):
"""
Convert json at the path plus .json to a msgpack binary
Parameters
----------
templatePath: str
Path to templates
path: str
Path to json minus the extension
"""
templates = {}
for p in os.listdir(templatePath):
p = templatePath + "/" + p
if os.path.isdir(p):
for ip in os.listdir(p):
ip = p + "/" + ip
if os.path.isdir(ip):
print("Too deep, skipping: " + ip)
else:
templates[ip] = Daisy(ip)
else:
templates[p] = Daisy(p)
self.templates = templates
return templates
rpath = path + ".json"
res = b""
with open(rpath) as f:
res = msgpack.dumps(json.load(f))
with open(path, "wb") as f:
f.write(res)

57
src/Daisy/Index.py Normal file
View File

@ -0,0 +1,57 @@
from Daisy.Daisy import Daisy
class Index(Daisy):
def __init__(
self,
nodeNickname,
daisyCryptography,
prefill=[],
indexedFields=[],
autoIndex=True,
):
# TODO: Load from disk
if autoIndex:
if prefill != []:
if indexedFields == []:
for i in prefill:
# TODO: Value type annotation
# TODO: Value weighting
for k, v in i.items():
indexedFields.append(k)
indexedFields = list(set(indexedFields))
super().__init__(
nodeNickname + ".index",
daisyCryptography,
prefillDict={"_index": prefill, "_fields": indexedFields},
)
def addEntry(self, entry):
index = self.msg["_index"]
index.append(entry)
self.write(override={"_index": index})
def search(self, keydict: dict, strict: bool = True):
"""
Search cache for record for records with values
keydict: dict
Values to search for
strict: bool
Whether to require values match
"""
results = []
for ob in self.msg["_index"]:
if strict and type(ob) != str:
addcheck = False
for k, v in keydict.items():
if k in ob.keys():
if v in ob[k]:
addcheck = True
else:
addcheck = False
break
if addcheck:
results.append(ob)
return results
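
A short sketch of how the new Index might be used (daisyCryptography and the entry values are placeholders): entries accumulate under the _index key and search matches them field by field.

```python
# Illustrative Index usage
idx = Index("node00", daisyCryptography, prefill=[], indexedFields=["head", "body"])

idx.addEntry({"head": "PIER", "body": "mesh notes", "remoteNode": "000002"})

matches = idx.search({"head": "PIER"})
# -> [{'head': 'PIER', 'body': 'mesh notes', 'remoteNode': '000002'}]
```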

6
src/Daisy/Ref.py Normal file
View File

@ -0,0 +1,6 @@
from Daisy.Daisy import Daisy
class Ref(Daisy):
def __init__(self, path, remoteNodeID):
super().__init__(path, remote=remoteNodeID)

View File

@ -1,7 +1,12 @@
from Daisy.Daisy import Daisy
import os
import logging
import traceback
logger = logging.getLogger("__main__." + __name__)
# TODO: Higher priority errors
class Store(Daisy):
"""
@ -10,24 +15,51 @@ class Store(Daisy):
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Daisy/Store.py>`__
"""
def __init__(self, store: str, path: str, nodeNickname: str):
def __init__(self, store: str, path: str, nodeNickname: str, daisyCryptography):
fpath = "daisy/{0}/{1}".format(path, nodeNickname)
cpath = "{0}/{1}/{2}".format(path, nodeNickname, store)
if not os.path.exists(fpath):
os.mkdir(fpath)
super().__init__("daisy/" + cpath)
super().__init__("daisy/" + cpath, daisyCryptography)
self.ephemeral = {}
def update(self, entry: str, data, recur: bool = True):
def createEmpty(self, key):
self.msg[key] = {}
# TODO: Update usages of update where necessary to keep out of mem
def update(self, entry: str, data, recur: bool = True, write=True):
if write:
if recur:
if entry not in self.msg.keys():
self.createEmpty(entry)
for key in data.keys():
self.msg[entry][key] = data[key]
else:
self.msg[entry] = data
self.write()
else:
if recur:
if entry not in self.ephemeral.keys():
self.ephemeral[entry] = {}
for key in data.keys():
self.ephemeral[entry][key] = data[key]
else:
self.ephemeral[entry] = data
def getRecord(self, key: str):
def getRecord(self, key: str, ephemeral=False):
logger.log(30, key)
try:
if ephemeral:
if key in self.ephemeral.keys():
return self.ephemeral[key]
else:
logger.log(20, "Record does not exist")
return False
else:
if key in self.get().keys():
return self.get()[key]
else:
self.cLog(20, "Record does not exist")
logger.log(20, "Record does not exist")
return False
except Exception:
logger.log(30, traceback.format_exc())
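
The key change to Store above is the write flag: write=True persists through Daisy as before, while write=False keeps the record in the new in-memory ephemeral dict, which is what getRecord reads when called with ephemeral=True. A small illustrative sketch (arguments are placeholders):

```python
# Hypothetical usage of the ephemeral (in-memory only) path
store = Store("key", "cryptography", "node00", daisyCryptography)

store.update("000002", {"publicKey": b"-----BEGIN PUBLIC KEY-----..."}, write=False)

inMemory = store.getRecord("000002", ephemeral=True)   # {'publicKey': b'...'}
persisted = store.getRecord("000002")                  # False until written to disk
```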

View File

@ -39,29 +39,39 @@ class Header(Packet):
packetCount: int,
sender: int,
senderDisplayName: int,
sourceNode: int,
recipient: int,
recipientNode: int,
subpacket: bool = False,
wantFullResponse: bool = False,
packetsClass: int = 0,
pAction: int = -1,
target=True,
):
super().__init__(
"", packetsID=packetsID, packetCount=packetCount, packetsClass=packetsClass
b"", packetsID=packetsID, packetCount=packetCount, packetsClass=packetsClass
)
self.target = target
self.sender = sender
self.senderDisplayName = senderDisplayName
if target:
self.recipient = recipient
self.recipientNode = recipientNode
self.subpacket = subpacket
else:
self.recipient = -1
self.recipientNode = -1
# TODO: Populating with submessage ids
self.submessages = []
self.wantFullResponse = wantFullResponse
self.pAction = pAction
self.sourceNode = sourceNode
self.packetCount = packetCount
def usePreset(self, path: str):
def usePreset(self, path: str, daisyCryptography):
"""
Add preset fields to the packet
"""
preset = Daisy(path)
preset = Daisy(path, daisyCryptography)
for key in preset.get().keys():
self.msg[key] = preset.get()[key]
@ -72,11 +82,13 @@ class Header(Packet):
res = msgpack.loads(super().dump())
res["sender"] = self.sender
res["senderDisplayName"] = self.senderDisplayName
res["sourceNode"] = self.sourceNode
res["recipient"] = self.recipient
res["recipientNode"] = self.recipientNode
res["subpacket"] = self.subpacket
res["submessages"] = self.submessages
res["wantFullResponse"] = self.wantFullResponse
res["packetsClass"] = self.packetsClass
res["pAction"] = self.pAction
res["packetCount"] = self.packetCount
return msgpack.dumps(res)

View File

@ -4,9 +4,15 @@ import lzma
import msgpack
import random
import math
import logging
# DO NOT CHANGE DATA SIZE UNLESS YOU KNOW WHAT YOU'RE DOING
logger = logging.getLogger("__main__." + __name__)
def dict2bytes(cdict: dict):
return lzma.compress(msgpack.dumps(cdict))
class Message:
"""
@ -25,13 +31,20 @@ class Message:
bytesObject: bytes,
sender: int,
senderDisplayName: int,
sourceNode,
recipient: int,
recipientNode: int,
cryptographyInfo,
packetsClass,
pAction,
dataSize: int = 128,
wantFullResponse: bool = False,
packetsClass: int = 0,
target=True,
subMessage=False,
primaryMessage=None,
pskEncrypt=False
):
# TODO: PSK for usage prior to credentials
"""
Parameters
----------
@ -59,6 +72,11 @@ class Message:
packetsClass: int
Which protocol the packets are using
"""
self.recipientNode = recipientNode
self.target = target
self.subMessage = subMessage
if subMessage:
self.primaryMessage = primaryMessage
if isinstance(bytesObject, list):
packets = [h.Header(bytesObject[0])]
for packet in bytesObject:
@ -75,10 +93,18 @@ class Message:
# Data passed in by peers should already have been e2ee encrypted by SubtleCrypto
# Transport encryption
# bytesObject = lzma.compress(bytesObject, str(recipientNode).zfill(6), isDict=False)
bytesObject = cryptographyInfo.encrypt(bytesObject, self.no)
if subMessage == False:
bytesObject, nonce, tag = cryptographyInfo.encrypt(
bytesObject, str(recipientNode).zfill(6), isDict=False, pskEncrypt=pskEncrypt
)
logger.log(10, bytesObject)
self.nonce = nonce
self.tag = tag
packets = []
self.packetsID = random.randrange(0, 999999)
pnum = 1
# if subMessage:
dataSize = 80
blen = math.ceil(len(bytesObject) / dataSize)
tb = b""
for it in range(blen):
@ -86,11 +112,25 @@ class Message:
b = bytesObject[it * dataSize :]
else:
b = bytesObject[it * dataSize : (it * dataSize + dataSize)]
if subMessage:
packets.append(
p.Packet(
b,
self.packetsID,
pnum,
packetsClass=packetsClass,
primaryMessage=primaryMessage,
)
)
else:
packets.append(
p.Packet(b, self.packetsID, pnum, packetsClass=packetsClass)
)
pnum += 1
tb += b
if subMessage:
pass
else:
packets.insert(
0,
h.Header(
@ -98,18 +138,26 @@ class Message:
pnum,
sender,
senderDisplayName,
sourceNode,
recipient,
recipientNode,
wantFullResponse=wantFullResponse,
packetsClass=packetsClass,
pAction=pAction,
target=target,
),
)
self.fullPackets = [p for p in packets]
if subMessage:
pnum -= 1
for it in range(pnum):
packet = msgpack.loads(packets[it].dump())
packet["packetCount"] = pnum
packets[it] = msgpack.dumps(packet)
self.packets = packets
def get(self) -> list[p.Packet]:
@ -118,12 +166,33 @@ class Message:
"""
return self.packets
def reassemble(self, completedMessage: dict):
def reassemble(self, completedMessage: dict, cryptographyInfo, subMessage=False, yctx=None, packetCount=None):
"""
Reassemble packets from a completed message in `Sponge.base`
"""
data = b""
for it in range(1, int(completedMessage["packetCount"])):
sourceNode = None
# TODO: Fix reassembly for primary
if subMessage:
sourceNode = yctx["sourceNode"]["val"]
for it in range(1, packetCount+1):
data += completedMessage["data"][completedMessage["dataOrder"].index(it)]
res = msgpack.loads(lzma.decompress(data))
return res
data = msgpack.loads(lzma.decompress(data))
logger.log(10, data)
logger.log(10, completedMessage["data"])
logger.log(10, completedMessage["dataOrder"])
else:
# TODO: Cryptography setup
packetCount = int(completedMessage.yctx["packetCount"]["val"])
sourceNode = completedMessage.yctx["sourceNode"]["val"]
logger.log(10, completedMessage.data)
for it in range(1, packetCount):
if it in completedMessage.dataOrder:
data += completedMessage.data[completedMessage.dataOrder.index(it)]
logger.log(10, "pre decrypt")
logger.log(10, data)
data = cryptographyInfo.decrypt(
data, sourceNode, completedMessage.nonce, completedMessage.tag
)
# data = msgpack.loads(lzma.decompress(data))
return data
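
For a sense of the packet counts the chunking above produces: with dataSize forced to 80 bytes, a payload of N encrypted bytes becomes ceil(N / 80) data packets, plus one Header packet at index 0 for a primary (non-sub) message. A worked example:

```python
import math

# Worked example of the chunking in Message.__init__ (dataSize is forced to 80)
payloadLen = 1024                               # illustrative encrypted payload size
dataSize = 80
dataPackets = math.ceil(payloadLen / dataSize)  # 13 data packets
totalPackets = dataPackets + 1                  # plus the Header packet at index 0
```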

View File

@ -0,0 +1 @@
# WARNING: DO NOT TRY TO POKE A BARNACLE

View File

@ -0,0 +1,27 @@
from Packets.Message import Message
import Packets.Message
class Bubble(Message):
def __init__(
self,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
data,
):
bytesOb = Packets.Message.dict2bytes({"data": data, "recipient": recipient, "target": "bubble"})
super().__init__(
bytesOb,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
2,
0,
)

View File

@ -0,0 +1,29 @@
from Packets.Message import Message
import Packets.Message
class IndexSync(Message):
def __init__(
self,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
index,
target=False
):
bytesOb = Packets.Message.dict2bytes({"index": index})
super().__init__(
bytesOb,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
1,
2,
target=target
)

View File

@ -0,0 +1,31 @@
from Packets.Message import Message
import Packets.Message
class CatchRequest(Message):
def __init__(
self,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
head,
body,
fins,
pskEncrypt=False
):
bytesOb = Packets.Message.dict2bytes({"head": head, "body": body, "fins": fins, "recipient": sender, "recipientNode": sourceNode})
super().__init__(
bytesOb,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
1,
0,
pskEncrypt=pskEncrypt
)

View File

@ -0,0 +1,29 @@
from Packets.Message import Message
import Packets.Message
class CatchResponse(Message):
def __init__(
self,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
html,
pskEncrypt=False
):
bytesOb = Packets.Message.dict2bytes({"html": html, "recipient": recipient, "target": "catch"})
super().__init__(
bytesOb,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
1,
1,
pskEncrypt=pskEncrypt
)

View File

@ -0,0 +1,46 @@
from Packets.Message import Message
import Packets.Message
# TODO: Send with psk encryption
class Handshake(Message):
def __init__(
self, sender, senderID, recipient, recipientNode, cryptographyInfo, onodeID, sourceNode
):
publicKey = None
ephemeralKey = None
record = cryptographyInfo.getRecord("key", "self")
if record != False:
if "publicKey" in record.keys():
publicKey = record["publicKey"]
else:
raise Exception("Public key missing for node")
if "ourEphemeralKey" in record.keys():
ephemeralKey = record["ourEphemeralKey"]
else:
cryptographyInfo.genOurEphemeralKey(onodeID)
record = cryptographyInfo.getRecord("key", onodeID, ephemeral=True)
ephemeralKey = record["ourEphemeralKey"].export_key(
format="PEM",
prot_params={"iteration_count": 131072}
)
if "staticKey" not in record.keys():
cryptographyInfo.genStaticKey(onodeID)
else:
raise Exception("Node does not exist")
bytesOb = Packets.Message.dict2bytes(
{"ephemeralKey": ephemeralKey}
)
super().__init__(
bytesOb,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
3,
0,
pskEncrypt=True
)

View File

@ -0,0 +1,29 @@
from Packets.Message import Message
import Packets.Message
class HopperRequest(Message):
def __init__(
self,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
url,
params,
method,
cryptographyInfo,
):
bytesOb = Packets.Message.dict2bytes({"url": url, "parameters": params, "method": method, "recipient": sender, "recipientNode": sourceNode})
super().__init__(
bytesOb,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
5,
0,
)

View File

@ -0,0 +1,23 @@
from Packets.Message import Message
import Packets.Message
class HopperResponse(Message):
def __init__(
self, sender, senderID, sourceNode, recipient, recipientNode, response, cryptographyInfo
):
bytesOb = Packets.Message.dict2bytes({"res": response, "recipient": recipient, "target": "hopper"})
# bytesOb = cryptographyInfo.encrypt(bytesOb, recipientNode)
super().__init__(
bytesOb,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
5,
1
)

View File

@ -0,0 +1,36 @@
from Packets.Message import Message
import Packets.Message
import logging
logger = logging.getLogger("__main__." + __name__)
# TODO: Add public key
class AnnounceMessage(Message):
def __init__(
self,
sender,
senderID,
sourceNode,
cryptographyInfo,
mapping,
):
mapping["publicKey"] = cryptographyInfo.getRecord("key", "self")["publicKey"]
recipient = -1
recipientNode = -1
bytesOb = Packets.Message.dict2bytes(mapping)
logger.log(10, "Mapping bytes")
logger.log(10, bytesOb)
super().__init__(
bytesOb,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
0,
0,
target=False,
)

View File

@ -0,0 +1,20 @@
# Template for a Protocol message
from Packets.Message import Message
import Packets.Message
class Template(Message):
def __init__(
self, sender, senderID, sourceNode, recipient, recipientNode, cryptographyInfo
):
super().__init__(
bytesOb,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
0,
0,
)

View File

View File

@ -32,6 +32,7 @@ class Packet:
packetNumber=False,
packetCount: int = 1,
packetsClass: int = -1,
primaryMessage=None,
):
if packetsID == False:
self.packetsID, self.packetNumber, self.data, self.packetsClass = (
@ -42,14 +43,7 @@ class Packet:
self.packetNumber = packetNumber
self.data = data
self.packetsClass = packetsClass
if packetsClass != -1:
pass
"""template = Daisy("daisy/packet_templates/template.lookup").get()[
str(packetsClass).zfill(2)
]
edata = Daisy("daisy/packet_templates/" + template)
for key in edata.get().keys():
self.data[key] = edata.get()[key]"""
self.primaryMessage = primaryMessage
def parsePayload(data):
"""
@ -75,6 +69,8 @@ class Packet:
}
if res["data"] == "":
res.pop("data")
if self.primaryMessage != None:
res["primaryMessage"] = self.primaryMessage
ores = msgpack.dumps(res)
# logging.log(20, "Packet size: " + str(sys.getsizeof(ores)))
return ores

View File

@ -1,4 +1,39 @@
class SubMessage:
"""
TODO
"""
from Packets.Message import Message
import Packets.Message
import logging
logger = logging.getLogger("__main__." + __name__)
class SubMessage(Message):
def __init__(
self,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
protocolID,
pAction,
data,
target=True,
primaryMessage=None
):
bytesOb = Packets.Message.dict2bytes(data)
logger.log(10, "Submessage bytes")
logger.log(10, bytesOb)
super().__init__(
bytesOb,
sender,
senderID,
sourceNode,
recipient,
recipientNode,
cryptographyInfo,
protocolID,
pAction,
target=target,
subMessage=True,
primaryMessage=primaryMessage
)

33
src/Services/Action.py Normal file
View File

@ -0,0 +1,33 @@
class Action:
def __init__(
self,
action,
data,
sender=None,
senderID=None,
sourceNode=None,
recipient=None,
recipientNode=None,
):
self.action = action
self.data = data
if sender != None:
self.data["sender"] = sender
if senderID != None:
self.data["senderID"] = senderID
if sourceNode != None:
self.data["sourceNode"] = sourceNode
if recipient != None:
self.data["recipient"] = recipient
if recipientNode != None:
self.data["recipientNode"] = recipientNode
def getAction(self):
return self.action
def getData(self):
return self.data

27
src/Services/ToDo.py Normal file
View File

@ -0,0 +1,27 @@
from Services.Action import Action
class ToDo:
def __init__(self):
self.actions = []
# otf here meaning on the fly
# This prefix should be applied to optional
# fields that allow soft defining components
# by taking in their initialization parameters
# and instantiating an object with those
# Q: Should we break camel case here for cleaner
# reading? e.g. otf_action looks better than otfAction
def add(self, action, otfAction=None, otfData=None):
if otfAction != None:
if otfData != None:
action = Action(otfAction, otfData)
self.actions.append(action)
def next(self):
if len(self.actions) < 1:
return (False, None)
else:
action = self.actions.pop()
return (action.getAction(), action.getData())
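
A brief sketch of the new ToDo/Action queue (the action name and payload are illustrative): actions can be appended directly or built on the fly from the otf* parameters, and next pops one as an (action, data) tuple, returning (False, None) once the queue is empty.

```python
# Hypothetical queue usage
todo = ToDo()

# Build an Action on the fly from its initialization parameters
todo.add(None, otfAction="sendCatch", otfData={"head": "PIER", "body": "mesh"})

action, data = todo.next()   # ("sendCatch", {"head": "PIER", "body": "mesh"})
action, data = todo.next()   # (False, None) once the queue is empty
```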

View File

@ -39,7 +39,20 @@ class Network:
self.mimport()
self.lookup = {}
def addLookup(self, onodeID: str, mnodeID: str):
async def addLookup(self, onodeID: str, mnodeID: str):
"""
Adds node to lookup
Parameters
----------
onodeID: str
Internal nodeID
mnodeID: str
MeshTastic nodeID
"""
self.lookup[onodeID] = mnodeID
def syncaddLookup(self, onodeID: str, mnodeID: str):
"""
Adds node to lookup

View File

@ -0,0 +1,5 @@
---
title: Node
---
flowchart LR
id

View File

@ -8,6 +8,21 @@
--palette-two: #A6B08E;
--palette-three: #879B77;
--palette-four: #61805B;
--grid-columns: 8;
--grid-rows: 8;
}
#controls {
display: grid;
grid-template-columns: repeat(var(--grid-columns), auto);
gap: 5%;
}
#render {
display: grid;
grid-template-columns: repeat(var(--grid-columns), 20px);
grid-template-rows: repeat(var(--grid-rows), 20px);
border: 1px solid black;
}
html {
@ -22,6 +37,10 @@ html {
background-color: var(--palette-two);
}
.plankInner {
display: none;
}
ul {
padding: 0;
list-style-type: none !important;
@ -29,9 +48,12 @@ ul {
li {
padding-top: 5px;
text-decoration: none;
list-style-type: none;
}
input[type=text],
input[type=number] {
min-width: 150px;
max-width: 150px;
}

View File

@ -0,0 +1,5 @@
---
title: Node
---
flowchart LR
id

Binary files not shown (three new image files added; sizes 1.5 KiB, 707 B, and 4.7 KiB).

164
src/Splash/res/js/custom.js Normal file
View File

@ -0,0 +1,164 @@
function hopID() {
document.getElementById("hopperPeerID").value =
document.getElementById("peerID").value;
}
function toggle(id) {
var cstyle = document.getElementById(id).style.display;
var nstyle = "block";
if (cstyle == "block") {
nstyle = "none";
}
document.getElementById(id).style.display = nstyle;
}
function addFin() {
var pfin = document.querySelector("#fins").lastElementChild;
var pclone = pfin.cloneNode(true);
pclone.id = "cfin";
var cid = pclone.firstElementChild.id;
cid = cid.substring(3);
cid = ("" + (parseInt(cid) + 1)).padStart(2, "0");
cid = "fin" + cid;
pclone.firstElementChild.id = cid;
pfin.insertAdjacentElement("afterend", pclone);
}
function getFins() {
var fins = document.querySelector("#fins").children;
var finsStr = "";
for (var i = 0; i < fins.length; i++) {
var fin = fins[i];
finsStr = finsStr + fin.firstElementChild.value;
if (i != fins.length - 1) {
finsStr = finsStr + ",";
}
}
document.getElementById("finsStr").value = finsStr;
}
function getCatch() {
document.getElementById("catchPeerID").value =
document.getElementById("peerID").value;
getFins();
}
// P2Chat code
function splash(that) {
//alert(parent.id);
//alert(parent.getAttribute("data-coord"));
//alert(that.value);
that.style.backgroundColor = document.querySelector("#color").value;
}
function cGen(that) {
document.getElementById("render").innerHTML = "";
var parent = that.parentElement;
// alert("parent");
var canvasX = Number(document.querySelector("#canvasX").value);
// alert("x");
canvasX = Math.floor(canvasX);
document
.querySelector(":root")
.style.setProperty("--grid-rows", "" + canvasX);
// alert("grid");
var canvasY = Number(document.querySelector("#canvasY").value);
//alert(document.querySelector("#canvasY").value);
canvasY = Math.floor(canvasY);
document
.querySelector(":root")
.style.setProperty("--grid-columns", "" + canvasY);
//alert(canvasY);
var nodeRender = "";
var cloneRender = "";
var nodeControl = "";
var cloneControl = "";
//alert("start loop");
for (let x = 0; x < canvasX; x++) {
for (let y = 0; y < canvasY; y++) {
//alert(" in");
nodeRender = document.getElementById("rendertemplate");
//alert(" past template");
cloneRender = nodeRender.cloneNode(true);
cloneRender.style.display = "grid";
cloneRender.id = "i" + x + "x" + y;
if (y == 0) {
//alert(cloneRender.innerHTML);
}
document.getElementById("render").appendChild(cloneRender);
}
}
}
function setColor(that) {
var color = that.value;
//alert(typeof color);
if (color.includes("#")) {
document.querySelector("#color").value = color;
} else {
document.querySelector("#color").value = "#" + color;
document.querySelector("#picker").value = "#" + color;
}
}
function saveAs(uri, filename) {
var link = document.createElement("a");
if (typeof link.download === "string") {
link.href = uri;
link.download = filename;
//Firefox requires the link to be in the body
document.body.appendChild(link);
//simulate click
link.click();
//remove the link when done
document.body.removeChild(link);
} else {
window.open(uri);
}
}
function save(toFile) {
var canvas = document.createElement("canvas");
var canvasX = Number(document.querySelector("#canvasX").value);
var canvasY = Number(document.querySelector("#canvasY").value);
//alert(canvasX);
//alert(canvasY);
canvas.width = canvasY;
canvas.height = canvasX;
var ctx = canvas.getContext("2d");
var x = 0;
var y = 0;
for (x = 0; x < canvasX; x++) {
for (y = 0; y < canvasY; y++) {
//alert(document.querySelector("#i" + x + "x" + y).style.backgroundColor);
//alert("before fill style");
ctx.fillStyle = document.querySelector(
"#i" + x + "x" + y,
).style.backgroundColor;
//ctx.fillStyle = "#00ff00";
//alert("after fill style");
ctx.fillRect(y, x, 1, 1);
}
}
if (toFile) {
saveAs(canvas.toDataURL("image/png"), " download.png");
} else {
document.getElementById("p2img").value = canvas.toDataURL("image/png");
}
}
function p2ToBubble() {
save();
var bub = new Object();
bub.img = document.getElementById("p2img").value;
document.getElementById("chat_message").value += JSON.stringify(bub);
}
document.addEventListener("DOMContentLoaded", function(event) {
setColor(document.getElementById("picker"));
cGen(document.getElementById("canvasY"));
});

View File

@ -1,13 +1,30 @@
from uuid import uuid4
import Components.hopper as hopper
from Packets.Messages.Protocols.catch.Request import CatchRequest
from Packets.Messages.Protocols.catch.IndexSync import IndexSync
from Packets.Messages.Protocols.hopper.Request import HopperRequest
from Packets.Messages.Protocols.bubble.Bubble import Bubble
from microdot import Microdot
from microdot import send_file
from microdot.websocket import with_websocket
from microdot.websocket import with_websocket, WebSocketError
from microdot import Request
from microdot.jinja import Template
from microdot.session import Session, with_session
import random
import json
import time
import logging
import traceback
import uuid
import re
import msgpack
logger = logging.getLogger("__main__." + __name__)
# Enable 500 kB files in the webui
Request.max_content_length = 1024 * 1024 * 0.5
Request.max_body_length = 1024 * 1024 * 0.5
@ -44,17 +61,30 @@ class Server:
Reference to our Catch Cache instance to pull from for serving Catchs
"""
def __init__(self, transceiver, catch, onodeID, network, cLog):
self.cLog = cLog
def __init__(
self,
transceiver,
catch,
onodeID,
network,
cryptographyInfo,
remoteCatchIndex,
cache,
):
self.transceiver = transceiver
self.network = network
self.network.addLookup(onodeID, self.transceiver.interface.localNode.nodeNum)
self.network.syncaddLookup(onodeID, self.transceiver.interface.localNode.nodeNum)
self.nodeID = str(onodeID)
self.peerIDs = {}
self.app = Microdot()
self.session = Session(
self.app, secret_key=str(uuid.uuid4())
)
self.catch = catch
# self.nmap = {self.nodeID: self.t.interface.localNode.nodeNum}
# self.cLog(20, "Initialized server")
self.cache = cache
self.cryptographyInfo = cryptographyInfo
self.remoteCatchIndex = remoteCatchIndex
logger.info("Initialized server")
@self.app.route("/res/<path:path>")
async def static(request, path):
@ -64,7 +94,45 @@ class Server:
if ".." in path:
# directory traversal is not allowed
return "Not found", 404
return send_file("webui/build/res/" + path, max_age=86400)
return send_file("Splash/build/res/" + path, max_age=86400)
@self.app.route("/")
async def index(request):
"""
Static handler to serve the web ui
"""
try:
return send_file("Splash/build/index/index.html")
except Exception as e:
logger.error(traceback.format_exc())
return "Server error", 500
@self.app.route("/hop/<tmpResourceID>")
async def hop(request, tmpResourceID):
try:
return self.cache.get("tmp/hopper/" + tmpResourceID).get()["html"]
except Exception as e:
logger.error(traceback.format_exc())
return "Server error", 500
@self.app.route("/api/json")
async def api(request):
try:
return {"hello": "world"}
except Exception as e:
logger.error(traceback.format_exc())
return "Server error", 500
@self.app.route("/admin")
@with_session
async def admin(request):
try:
return Template("Splash/admin/admin.html").render(psks=self.getPSKs())
except Exception as e:
logger.error(traceback.format_exc())
return "Server error", 500
@self.app.route("/bubble")
@with_websocket
@ -77,9 +145,12 @@ class Server:
`🔗 HTMX docs <https://htmx.org/docs/>`_
"""
while True:
try:
r = await ws.receive()
message = json.loads(r)
#logger.debug(json.dumps(message, indent=4))
trigger = message["HEADERS"]["HX-Trigger"]
logger.debug(f"Trigger: {trigger}")
# TODO: Drop old id from cache on regen
if trigger == "gpID":
peerID = str(random.randrange(0, 1000000)).zfill(6)
@ -129,20 +200,122 @@ class Server:
senderName = 000000
recipient = message["recipientID"]
recipientNode = message["recipientNode"]
await self.t.addPackets(
msgpack.dumps({"data": data}),
r = Bubble(
sender,
senderName,
self.nodeID,
recipient,
int(recipientNode),
directID=self.network.doLookup(recipientNode),
packetsClass=2,
recipientNode,
self.cryptographyInfo,
data,
)
await self.transceiver.sendMessage(r)
elif trigger == "catch":
res = self.catch.get(message["head"], message["body"])
await ws.send('<div id="catchDisplay">{0}</div>'.format(res))
res = self.catch.get(
message["head"],
message["body"],
fins=message["finsStr"].split(","),
)
if res == False:
await ws.send(
'<div id="catchDisplay">{0}</div>'.format(
"Searching PierMesh for Catch please wait...<img src='/res/img/searching.gif'>"
)
)
peerID = message["catchPeerID"]
q = {
"head": message["head"],
"body": message["body"],
}
fins = None
if "fins" in message:
if message["fins"] != "":
q["fins"] = message["fins"]
fins = message["fins"]
# TODO: Handling multiple results
q = self.remoteCatchIndex.search(q)[0]
if q != False:
m = CatchRequest(
peerID,
000000,
self.nodeID,
q["remoteNode"],
q["remoteNode"],
self.cryptographyInfo,
message["head"],
message["body"],
fins
)
await self.transceiver.sendMessage(m)
# TODO: Daisy replication settings
elif trigger == "hopper":
url = message["url"]
logger.debug(url)
isPost = "isPost" in message.keys()
method = "get"
if isPost:
method = "post"
remote = "remote" in message.keys()
remoteNode = message["remoteNode"]
params = ""
try:
params = json.loads(message["params"])
except json.decoder.JSONDecodeError:
# TODO: Proper error code
# TODO: Fix stalling at startup
# TODO: Request missing packets
logger.error("Parameters on hop request were not json, dropping")
# TODO: Redirecting to html content
if remote and (remoteNode != ""):
peerID = message["hopperPeerID"]
await ws.send(
'<div id="lilypad">{0}</div>'.format(
"Requesting hop from remote node...<img src='/res/img/searching.gif'>"
)
)
r = HopperRequest(
peerID,
000000,
self.nodeID,
remoteNode,
remoteNode,
url,
params,
method,
self.cryptographyInfo,
)
await self.transceiver.sendMessage(r)
else:
if isPost:
await ws.send(
'<div id="lilypad">{0}</div>'.format(
hopper.post(url, params)
)
)
else:
res = hopper.get(url, params)
if res["content-type"] == "text/html":
logger.debug("Local hopping done, html content found")
resID = str(uuid4())
self.cache.create(
"tmp/hopper/" + resID, {"html": res}
)
await ws.send(
"<div id='lilypad'><a href='/hop/{0}'></a></div>".format(
resID
)
)
else:
logger.debug("Local hopping done, non html content found")
await ws.send(
'<div id="lilypad">{0}</div>'.format(res)
)
# TODO: Catch update packets
elif trigger == "catchEdit":
if message["eID"] == "":
raise ValueError("Peer ID is blank")
self.catch.addc(
message["eID"],
self.nodeID,
@ -156,22 +329,66 @@ class Server:
<ul id="resultsCatch" hx-swap-oob='true'><li>OK</li></ul>
"""
)
logger.info(self.catch.genIndex(self.nodeID))
indexUpdate = IndexSync(
message["eID"],
0,
self.nodeID,
"-00001",
"-00001",
self.cryptographyInfo,
self.catch.genIndex(self.nodeID)
)
await self.transceiver.sendMessage(indexUpdate)
elif trigger == "admin":
# TODO
pass
else:
await ws.send(
"""<div id="chat_room" hx-swap-oob="beforeend">hi</div>"""
)
except WebSocketError as e:
pass
@self.app.route("/")
async def index(request):
"""
Static handler to serve the web ui
"""
return send_file("webui/build/index/index.html")
# Uncomment below for WebSocket debugging
logger.debug(traceback.format_exc())
return "Server error", 500
except Exception as e:
logger.error(traceback.format_exc())
return "Server error", 500
async def sendToPeer(self, peerID: str, data: str):
async def getPSKs(self):
psks = [
{"psk": v["PSK"], "nodeID": k}
for k, v in self.cryptographyInfo["msg"].items()
]
return psks
# TODO: Send catch to catch display
async def sendToPeer(self, peerID: str, data: str, target: str):
"""
Send data to Websocket of peer with peerID
"""
logger.debug(target)
if target == "bubble":
mapper = []
for ob in re.findall('{(.+?)}', data):
cobs = "{" + ob + "}"
cob = json.loads(cobs)
if "img" in cob.keys():
cimg = cob["img"]
mapper.append([cobs, f"<img style='width:30vw;image-rendering:pixelated;' src='{cimg}'>"])
for map in mapper:
data = data.replace(map[0], map[1])
await self.peerIDs[peerID]["ws"].send(
"<ul id='chat_room' hx-swap-oob='afterend'><li>{0}</li></ul>".format(data)
)
elif target == "catch":
logger.debug("In catch")
await self.peerIDs[peerID]["ws"].send(
"<div id='catchDisplay' style='background-color: var(--palette-three);'>{0}</div>".format(data)
)
elif target == "hopper":
await self.peerIDs[peerID]["ws"].send(
"<div id='lilypad' style='background-color: var(--palette-three);'>{0}</div>".format(data)
)

View File

@ -0,0 +1,16 @@
{% extends "shared/base.html" %}
{% block body %}
<form id="admin" hx-ext="ws" ws-connect="/bubble">
<ul id="psks">
{% for psk in psks %}
<li>
Node ID: {{ psk['nodeID'] }}
<br>
PSK:
<input type="number" id="{{ psk['nodeID'] }}" name="{{ psk['nodeID'] }}" value="{{ psk['psk'] }}" max="999999">
</li>
{% endfor %}
</ul>
<button id="pskUpdate" name="pskUpdate">Update PSK</button>
</form>
{% endblock %}

View File

@ -1,12 +1,9 @@
{% extends "shared/base.html" %}
{% block body %}
<img alt="PierMesh logo" height="128px" src="/res/img/logo.png">
<br>
<br>
{% include "shared/catch.nav.html" %}
<br>
{% include "shared/catch.editor.html" %}
<div>
<img alt="PierMesh logo" height="128px" src="/res/img/logo.png" style="display: inline-block;">
<h1 style="display: inline-block;">PierMesh</h1>
</div>
<div hx-history="false">
</div>
<br>
@ -14,9 +11,15 @@
<p id="vpeerID">Peer ID:</p>
<input id="peerID" type="hidden">
<p id="vnodeID">Node ID:</p>
<input id="peerID" type="hidden" >
<input id="nodeID" type="hidden">
<button id="gpID" ws-send>Connect</button>
</div>
<br>
{% include "shared/hopper.html" %}
<br>
{% include "shared/catch.nav.html" %}
<br>
{% include "shared/catch.editor.html" %}
<br>
{% include "shared/messenger.html" %}
{% endblock %}

View File

@ -10,6 +10,8 @@
<script src="/res/js/node_modules/htmx.org/dist/htmx.min.js"></script>
<script src="/res/js/ws.js">
</script>
<script src="/res/js/custom.js">
</script>
</head>
<body>

View File

@ -1,10 +1,10 @@
<div class="plank" hx-ext="ws" ws-connect="/bubble">
<img src="/res/img/catchdisplay.png">
<br>
Catch publisher<br>
{% extends "shared/plank.html" %}
{% set plankTitle = "catch editor" %}
{% set formID = "catchEdit" %}
{% set icon = "catchdisplay.png" %}
{% block insert %}
<ul id="resultsCatch">
</ul>
<form id="catchEdit" ws-send>
Head <br> <input type="text" name="head" size="4" maxlength="4"><br>
Seperator <br> <input type="text" name="sep" size="1" maxlength="1"><br>
Body <br> <input type="text" name="body" size="16" maxlength="16"><br>
@ -19,5 +19,5 @@
<br>
<button onclick="document.getElementById('eID').value = document.getElementById('peerID').value">Publish</button>
<input type="hidden" name="eID" id="eID">
</form>
</div>
{% endblock %}

View File

@ -1,8 +1,7 @@
<div class="plank" hx-ext="ws" ws-connect="/bubble">
<img src="/res/img/catchdisplay.png">
<br>
Catch<br><br>
<form id="catch" ws-send>
{% extends "shared/plank.html" %}
{% set plankTitle = "catch" %}
{% set formID = plankTitle %}
{% block insert %}
<label for="head">Head (max. 4 characters)</label>
<br>
<input type="text" id="head" name="head" size="4" maxlength="4">
@ -15,17 +14,18 @@
<input type="text" id="body" name="body" size="16" maxlength="16">
<ul id="fins">
Fins:
<li class="fin">
<input type="text" size="8" maxlength="8">
</li>
<li>
<button>+</button>
<li id="pfin" class="fin">
<input type="text" id="fin00" name="fin00" size="8" maxlength="8">
</li>
</ul>
<button>Get</button>
</form>
<button onclick="addFin();">+ Fin</button>
<input type="hidden" id="catchPeerID" name="catchPeerID">
<input type="hidden" id="finsStr" name="finsStr">
<br>
<button onclick="getCatch()">Get</button>
<br>
Results:
<br>
{% include "shared/catch.html" %}
<br>
</div>
{% endblock %}

View File

@ -0,0 +1,30 @@
{% extends "shared/plank.html" %}
{% set plankTitle = "hopper" %}
{% set formID = plankTitle %}
{% block insert %}
<label for="url">URL</label>
<br>
<input type="text" id="url" name="url" maxlength="255">
<br>
<label for="params">Parameters (json)</label>
<br>
<input type="textarea" id="params" name="params">
<br>
<label for="isPost">Unchecked: GET, Checked: POST</label>
<br>
<input type="checkbox" id="isPost" name="isPost">
<br>
<label for="remote">Use remote node?</label>
<br>
<input type="checkbox" id="remote" name="remote">
<br>
<label for="remoteNode">Remote node ID</label>
<br>
<input type="number" id="remoteNode" name="remoteNode" max="999999">
<br>
<br>
<input id="hopperPeerID" name="hopperPeerID" type="hidden">
<button onclick="hopID();">Get</button>
<br>
<div style="background-color: var(--palette-three);" id="lilypad"></div>
{% endblock %}

View File

@ -1,20 +1,18 @@
<div class="plank" hx-ext="ws" ws-connect="/bubble">
<img src="/res/img/bubbledisplay.png">
<br>
Bubble
<br>
<br>
{% extends "shared/plank.html" %}
{% set plankTitle = "bubble" %}
{% set formID = plankTitle %}
{% block insert %}
Responses: <ul id="chat_room" hx-swap="afterend">
</ul>
<br>
<form id="bubble" ws-send>
{% include "shared/p2chat.html" %}
Peer ID:<br>
<input name="recipientID" id="recipientID" type="number" max="999999"><br>
Node ID:<br>
<input name="recipientNode" id="recipientNode" type="number" max="999999"><br>
Data<br> <textarea style="min-width: 200px;min-height: 200px;" type="textarea" name="chat_message"></textarea>
Data<br> <textarea style="min-width: 200px;min-height: 200px;" type="textarea" name="chat_message"
id="chat_message"></textarea>
<br>
<input type="hidden" name="bID" id="bID">
<button onclick="document.getElementById('bID').value = document.getElementById('peerID').value">Send</button>
</form>
</div>
{% endblock %}

View File

@ -0,0 +1,22 @@
<div id="p2chat">
<input type="hidden" id="p2img">
<input type="hidden" id="color" value="#000000">
Background color:
<div id="rendertemplate" style="max-width: 20px;min-height:20px;background-color: #000000;"
onclick="try{splash(this);}catch(e){alert(e);}"></div>
<br>
<div id="controls">
Color picker: <input type="color" onchange="try{setColor(this);}catch(e){alert(e);}" value="#ffffff" id="picker">
Hex input: <input type="text" maxlength="6" onchange="try{setColor(this);}catch(e){alert(e);}" value="000000" />
</div>
<br>
<div id="create">
X<input type="number" min="8" max="64" placeholder="8" id="canvasX" value="8" />
Y<input type="number" min="8" max="64" placeholder="8" id="canvasY" value="8" />
<button onclick="try{cGen(this);}catch(e){alert(e);}">Generate workspace</button>
<button onclick="try{p2ToBubble();}catch(e){alert(e);}">Add to message</button>
</div>
<br>
<div id="render">
</div>
</div>

View File

@ -0,0 +1,18 @@
<div class="plank" hx-ext="ws" ws-connect="/bubble">
{% if icon is defined %}
<img src="/res/img/{{ icon }}">
{% else %}
<img src="/res/img/{{ plankTitle }}display.png">
{% endif %}
{{ plankTitle|capitalize }}<br><br>
<br>
<button onclick="toggle('{{ plankTitle }}PI');">Display/Hide</button>
<br>
<br>
<div class="plankInner" id="{{ plankTitle }}PI">
<form id="{{ formID }}" ws-send>
{% block insert %}
{% endblock %}
</form>
</div>
</div>

View File

@ -0,0 +1,184 @@
from Services.Action import Action
from Config.Context import Context
from Packets.Message import Message
import logging
logger = logging.getLogger("__main__." + __name__)
class YCTX(Context):
def __init__(
self,
packetsID,
packetCount,
pAction,
todo,
cryptographyInfo,
sourceNode,
subMessage=False,
subMessages={},
submessagesIDs=[],
eData=None,
):
super().__init__(
sourceNode=sourceNode,
packetsID=packetsID,
packetCount=packetCount,
pAction=pAction,
cryptographyInfo=cryptographyInfo,
subMessages=subMessages,
submessagesIDs=submessagesIDs,
eData=eData,
subMessage=subMessage,
)
self.todo = todo
class Yellow:
# TODO: Submessage completion actions
pActions = []
def __init__(
self,
yctx: YCTX,
):
self.yctx = yctx
self.message = None
self.submessages = yctx["subMessages"]["val"]
self.submessagesIDs = yctx["submessagesIDs"]["val"]
self.finishedSubmessages = {}
self.dataOrder = []
self.data = []
self.nonce = None
self.tag = None
self.gotHead = False
self.todo = yctx.todo
if yctx["eData"]["val"] != None:
self.dataOrder = yctx["eData"]["val"]["dataOrder"]
self.data = yctx["eData"]["val"]["data"]
def checkComplete(self):
logger.log(30, "In check complete")
for submessage in self.submessagesIDs:
submessage = str(submessage)
if submessage not in self.finishedSubmessages.keys():
logger.log(30, f"Submessage {submessage} missing")
return False
logger.log(30, "Submessages complete")
return True
async def id(self):
return self.packetsID
async def processPacket(self, p, subMessage=False, rdoaoc=[]):
doCheck = True
if subMessage == False and p["packetNumber"] != False:
#if not p["packetNumber"] in self.dataOrder:
if p["packetNumber"] not in self.dataOrder:
self.data.append(p["data"])
self.dataOrder.append(p["packetNumber"])
logger.log(10, "data")
logger.log(10, self.data)
else:
doCheck = False
elif p["packetNumber"] == False:
if not self.gotHead:
self.data.append(False)
self.dataOrder.append(False)
self.submessagesIDs = p["submessages"]
self.gotHead = True
else:
doCheck = False
else:
if not str(p["packetsID"]) in self.submessages.keys():
self.submessages[str(p["packetsID"])] = {
"data": [],
"dataOrder": []
}
if p["packetNumber"] not in self.submessages[str(p["packetsID"])]["dataOrder"]:
self.submessages[str(p["packetsID"])]["data"].append(p["data"])
self.submessages[str(p["packetsID"])]["dataOrder"].append(
p["packetNumber"])
else:
doCheck = False
if doCheck:
if subMessage != False:
if len(self.submessages[str(p["packetsID"])]["data"]) > (p["packetCount"]-1):
self.finishedSubmessages[str(p["packetsID"])] = Message.reassemble(
None, self.submessages[str(p["packetsID"])], self.yctx["cryptographyInfo"]["val"], packetCount=p["packetCount"], subMessage=True, yctx=self.yctx
)
# TODO: Trigger doact
return await self.doAct(subMessage=self.finishedSubmessages[str(p["packetsID"])])
elif len(self.data) >= (self.yctx["packetCount"]["val"]):
logger.log(
20, "Finished receiving for primary message " +
str(self.yctx["packetsID"]["val"])
)
"""
if self.yctx["wantFullResponse"]["val"] != False:
# TODO: implement loop
# responseLoop(packets_id)
pass
"""
# self.data = Message.reassemble(None, self, self.yctx["cryptographyInfo"]["val"])
return await self.doAct(repeatDataOnActions=rdoaoc)
async def dump(self):
msg = {}
msg["packetsID"] = self.yctx["packetsID"]["val"]
msg["data"] = self.data
msg["pAction"] = self.yctx["pAction"]["val"]
msg["submessages"] = self.submessages
return msg
async def doAct(
self,
setpAction=False,
repeatDataOnActions=[],
subMessage=False
):
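# Once every packet and submessage has arrived, reassemble the data and queue
# the protocol's pAction (plus any repeat-data actions) on the todo list.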
# TODO: Get submessage instead of primary
m = await self.dump()
if subMessage != False:
logger.log(30, "Submessage complete")
# self.submessages[m["packetsID"]] = self.yctx["crytopgraphyInfo"]["val"].decrypt(m["data"])
# TODO: Debug
logger.log(10, subMessage)
if "nonce" in subMessage.keys():
self.nonce = subMessage["nonce"]
self.tag = subMessage["tag"]
if self.checkComplete():
if len(self.data) >= (self.yctx["packetCount"]["val"]):
# TODO: Raising out of bounds error
pAction = self.pActions[int(m["pAction"])]
logger.log(30, "Full message completed")
# TODO: Decrypt
self.data = Message.reassemble(
None, self, self.yctx["cryptographyInfo"]["val"])
"""
data = self.cryptographyInfo.decrypt(m["data"])
if data == False:
self.cryptographyInfo.sessionSetup()
data = self.cryptographyInfo.decrypt(m["data"])
"""
# TODO: Go over whether we need to limit
# this from being a field in the data for
# primary messages
"""
todo.append(
Action(
"cLog", {"message": "Unknown pAction " + m["pAction"], "priority": 20}
)
)
"""
self.data["submessages"] = self.submessages
self.data["yctx"] = self.yctx
act = Action(pAction, self.data)
self.todo.append(act)
if len(repeatDataOnActions) > 0:
for caction in repeatDataOnActions:
cact = Action(caction, self.data)
#self.todo.add(cact)
self.todo.append(cact)

View File

@ -0,0 +1,10 @@
from . import hopper, map, cryptography, catch, bubble, daisy
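# Maps protocol names from the mlookup table onto their Yellow-based handler
# classes; Sponge.base.Filter.protoMap instantiates handlers from this dict.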
classDict = {
"bubble": bubble.Bubble,
"catch": catch.Catch,
"cryptography": cryptography.CryptographyFilter,
"daisy": daisy.Daisy,
"hopper": hopper.Hopper,
"map": map.Map,
}

View File

@ -1,19 +1,12 @@
async def filter(completeMessage, recipient, recipientNode, onodeID, todo):
from Sponge.Protocols.Yellow import Yellow
# TODO: Forwarding message to next node
# TODO: Method to get next node in path to recipient node
class Bubble(Yellow):
"""
Peer to peer protocol
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/src/Sponge/Protocols/bubble.py>`__
"""
m = completeMessage
if recipientNode == onodeID:
todo.append(
{
"action": "sendToPeer",
"data": {"res": m["data"]["data"], "recipient": recipient},
}
)
else:
# TODO: Forwarding message to next node
# TODO: Method to get next node in path to recipient node
# self.t.addPackets(m.data, sender, senderDisplay, recipient, recipientNode)
pass
pActions = ["sendToPeer"]

View File

@ -1,21 +1,11 @@
async def filter(completeMessage, recipient, recipientNode, todo):
from Sponge.Protocols.Yellow import Yellow
class Catch(Yellow):
"""
Catch exchange protocol
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/src/Sponge/Protocols/catch.py>`__
"""
m = completeMessage
# TODO: Sending to other nodes clients
todo.append(
{
"action": "sendCatch",
"data": {
"toLocal": True,
"recipientNode": recipientNode,
"recipient": recipient,
"head": m["head"],
"body": m["body"],
"fins": m["fins"],
},
}
)
pActions = ["sendCatch", "routeCatch", "syncIndex"]

View File

@ -1,17 +1,11 @@
async def filter(completeMessage, recipientNode, todo):
from Sponge.Protocols.Yellow import Yellow
class CryptographyFilter(Yellow):
"""
Cryptographic operations protocol
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/src/Sponge/Protocols/cryptography.py>`__
"""
todo.append(
{
"action": "keyDeriveDH",
"data": {
"publicKey": completeMessage["data"]["publicKey"],
"params": completeMessage["data"]["params"],
"recipientNode": recipientNode,
},
}
)
# logging.log(10, "Adding cryptography request")
pActions = ["initCryptography"]

View File

@ -0,0 +1,9 @@
from Sponge.Protocols.Yellow import Yellow
class Daisy(Yellow):
"""
Daisy exchange protocol
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/src/Sponge/Protocols/daisy.py>`__
"""

View File

@ -0,0 +1,11 @@
from Sponge.Protocols.Yellow import Yellow
class Hopper(Yellow):
"""
Internet inter(h)op protocol
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/hopper.py>`__
"""
pActions = ["hop", "routeHop"]

View File

@ -1,22 +1,17 @@
async def filter(completeMessage, todo):
from Sponge.Protocols.Yellow import Yellow
class Map(Yellow):
"""
Network mapping protocol
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/map.py>`__
"""
m = completeMessage
todo.append(
{
"action": "map",
"data": {
"onodeID": m["data"]["onodeID"],
"mnodeID": m["data"]["mnodeID"],
},
}
)
todo.append(
{
"action": "initNodeDH",
"data": {"mnodeID": m["data"]["mnodeID"], "onodeID": m["data"]["onodeID"]},
}
)
pActions = ["map", "initCryptography"]
def process(self, message, isSubMessage=False):
rdoa = []
if message["pAction"] == 0:
rdoa.append(self.pActions[1])
super().process(message, isSubMessage=isSubMessage, repeatDataOnActions=rdoa)

View File

@ -1,13 +1,14 @@
import logging
import msgpack
import traceback
from Packets.Message import Message
import Sponge.Protocols.bubble
import Sponge.Protocols.map
import Sponge.Protocols.catch
import Sponge.Protocols.cryptography
import Sponge.Protocols as Protocols
logger = logging.getLogger("__main__." + __name__)
class Filter:
"""
Packet filtering orchestration
@ -30,13 +31,14 @@ class Filter:
PierMesh node ID
"""
def __init__(self, cache, onodeID, todo, cLog):
self.cLog = cLog
def __init__(self, cache, onodeID, todo, cryptographyInfo):
self.cache = cache
self.cryptographyInfo = cryptographyInfo
"""
Messages is temporary storage for unfinished messages
"""
self.messages = {}
self.submessages = {}
self.completed = []
self.todo = todo
self.onodeID = onodeID
@ -49,7 +51,7 @@ class Filter:
msgpack.loads(payload)
return True
except Exception as e:
self.cLog(20, "Not msgpack encoded, skipping")
logger.debug("Not msgpack encoded, skipping")
return False
def selfCheck(self, packet):
@ -57,93 +59,128 @@ class Filter:
Check if this is a self packet, if so skip
"""
if packet["fromId"] == packet["toId"]:
self.cLog(20, "Self packet, ignored")
logger.log(20, "Self packet, ignored")
return False
else:
return True
async def protoMap(self, protocolID: int):
async def protoMap(self, protocolID: int, packetsID, packetCount, sourceNode, submessagesIDs=[], pAction=None):
"""
Get protocol from protocol ID using the mlookup table
"""
protocolID = str(protocolID).zfill(6)
return self.cache.get("mlookup").get()[protocolID]
proto = self.cache.get("mlookup").get()[protocolID]
subMessages = {}
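# Hand any submessages that arrived before their primary message to the freshly
# created handler so they are not lost.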
for pid in submessagesIDs:
if pid in self.submessages:
subMessages[pid] = self.submessages[pid]
ctx = Protocols.Yellow.YCTX(
packetsID,
packetCount,
pAction,
self.todo,
self.cryptographyInfo,
sourceNode,
submessagesIDs=submessagesIDs,
subMessages=subMessages
)
cf = Protocols.classDict[proto](
ctx
)
return cf
async def protoRoute(self, completeMessage: dict):
"""
Route message to proper protocol handler
"""
m = completeMessage
"""
Shorthand reference completeMessage for ease
"""
sender = m["sender"]
senderDisplay = m["senderDisplayName"]
recipient = m["recipient"]
recipientNode = m["recipientNode"]
# TODO: Fix packets to use class
protocol = await self.protoMap(m["packetsClass"])
self.cLog(20, "Protocol: " + protocol)
if protocol == "bubble":
await Sponge.Protocols.bubble.filter(
m, recipient, recipientNode, self.onodeID, self.todo
)
elif protocol == "map":
await Sponge.Protocols.map.filter(m, self.todo)
elif protocol == "catch":
await Sponge.Protocols.catch.filter(m, recipient, recipientNode, self.todo)
elif protocol == "cryptography":
await Sponge.Protocols.cryptography.filter(
completeMessage, recipientNode, self.todo
)
else:
self.cLog(30, "Cant route, no protocol")
# async def protoRoute(self, completeMessage: dict):
# """
# Route message to proper protocol handler
# """
# m = completeMessage
# """
# Shorthand reference completeMessage for ease
# """
# sender = m["sender"]
# senderDisplay = m["senderDisplayName"]
# recipient = m["recipient"]
# recipientNode = m["recipientNode"]
# protocol = await self.protoMap(m["packetsClass"])
# logger.log(20, "Protocol: " + protocol)
# if protocol == "bubble":
# await Sponge.Protocols.bubble.filter(
# m, recipient, recipientNode, self.onodeID, self.todo
# )
# elif protocol == "map":
# await Sponge.Protocols.map.filter(m, self.todo)
# elif protocol == "catch":
# await Sponge.Protocols.catch.filter(m, recipient, recipientNode, self.todo)
# elif protocol == "cryptography":
# await Sponge.Protocols.cryptography.filter(
# completeMessage, recipientNode, self.todo
# )
# elif protocol == "hopper":
# await Sponge.Protocols.hopper.filter(
# completeMessage, self.todo, recipient, recipientNode
# )
# else:
# logger.log(30, "Cant route, no protocol")
async def sieve(self, packet):
"""
Base filtering logic, takes a single MeshTastic packet
"""
# TODO: Instantiating the protocol on submessage
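# Rough flow: header packets (wantFullResponse present) create or update a
# protocol handler, numbered packets feed that handler, and packets that
# reference a primaryMessage are treated as submessages (buffered here if the
# primary has not arrived yet).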
p = packet["decoded"]["payload"]
if self.selfCheck(packet) and self.mCheck(p):
try:
p = msgpack.loads(p)
self.cLog(20, p)
logger.log(20, p)
packetsID = p["packetsID"]
packetsClass = p["packetsClass"]
if packetsID == 0:
self.cLog(20, "Single packet")
logger.log(20, "Single packet")
# Do sp handling
pass
if packetsID in self.completed:
raise ValueError("Message already completed")
if not (packetsID in self.messages):
self.messages[packetsID] = {
"packetCount": p["packetCount"],
if not (packetsID in self.messages) and (
not "wantFullResponse" in p.keys()
):
if "primaryMessage" in p.keys():
if p["primaryMessage"] in self.messages.keys():
# TODO: Temporary store for submessages if main hasnt arrived
# TODO: Check for submessages when instantiating
await self.messages[p["primaryMessage"]].processPacket(p, subMessage=True)
else:
if not str(p["packetsID"]) in self.submessages.keys():
self.submessages[str(p["packetsID"])] = {
"data": [],
"finished": False,
"dataOrder": [],
"dataOrder": []
}
if "wantFullResponse" in p.keys():
for k in p.keys():
if k != "data":
self.messages[packetsID][k] = p[k]
elif not p["packetNumber"] in self.messages[packetsID]["dataOrder"]:
self.messages[packetsID]["data"].append(p["data"])
self.messages[packetsID]["dataOrder"].append(p["packetNumber"])
if (len(self.messages[packetsID]["data"])) >= (
self.messages[packetsID]["packetCount"] - 1
) and ("wantFullResponse" in self.messages[packetsID].keys()):
self.cLog(20, "Finished receiving for message " + str(packetsID))
self.messages[packetsID]["finished"] = True
if self.messages[packetsID]["wantFullResponse"] != False:
# TO DO: implement loop
# responseLoop(packets_id)
pass
completeMessage = self.messages[packetsID]
completeMessage["data"] = Message.reassemble(None, completeMessage)
del self.messages[packetsID]
self.completed.append(packetsID)
self.cLog(20, "Assembly completed, routing")
await self.protoRoute(completeMessage)
self.submessages[str(p["packetsID"])]["data"].append(p["data"])
self.submessages[str(p["packetsID"])]["dataOrder"].append(p["packetNumber"])
#await self.messages[p["primaryMessage"]].processPacket(p, subMessage=True)
else:
self.messages[packetsID] = await self.protoMap(
p["packetsClass"], packetsID, p["packetCount"], 0
)
await self.messages[packetsID].processPacket(p)
elif "wantFullResponse" in p.keys():
if packetsID in self.messages.keys():
self.messages[packetsID].yctx["pAction"]["val"] = p["pAction"]
self.messages[packetsID].yctx["sourceNode"]["val"] = p["sourceNode"]
# self.messages[packetsID].submessageIDs = p["submessages"]
await self.messages[packetsID].processPacket(p)
else:
self.messages[packetsID] = await self.protoMap(
p["packetsClass"],
packetsID,
p["packetCount"],
p["sourceNode"],
submessagesIDs=p["submessages"],
pAction=p["pAction"]
)
await self.messages[packetsID].processPacket(p)
else:
await self.messages[packetsID].processPacket(p)
except Exception:
self.cLog(30, traceback.format_exc())
logger.log(30, traceback.format_exc())

View File

@ -1,13 +1,25 @@
import meshtastic
import meshtastic.serial_interface
from pubsub import pub
from Packets.Message import Message
from Packets.Messages.Protocols.map.Announce import AnnounceMessage
from Packets.SubMessage import SubMessage
import time
import sys
import random
import msgpack
import asyncio
import uuid
import traceback
import logging
logger = logging.getLogger("__main__." + __name__)
class Transceiver:
"""
@ -50,8 +62,10 @@ class Transceiver:
"""
def __init__(self, device, filter, onodeID, cache, catch, cryptographyInfo, cLog):
self.cLog = cLog
def __init__(self, device, filter, onodeID, cache, catch, cryptographyInfo, network):
try:
self.busy = False
self.network = network
self.cryptographyInfo = cryptographyInfo
self.filter = filter
self.tcache = cache
@ -63,57 +77,60 @@ class Transceiver:
# Be careful with this
self.cpid = 0
self.tasks = {}
# TODO: use node id to deliver directly
pub.subscribe(self.onReceive, "meshtastic.receive")
pub.subscribe(self.onConnection, "meshtastic.connection.established")
self.interface = meshtastic.serial_interface.SerialInterface(device)
i = 0
while self.notConnected:
if i % 5000000 == 0:
self.cLog(20, "Waiting for node initialization...")
logger.log(20, "Waiting for node initialization...")
i += 1
self.cLog(20, "Initialized")
logger.log(20, "Initialized")
except Exception as e:
logger.log(30, traceback.format_exc())
raise e
# TODO: Sending packets across multiple nodes/load balancing/distributed packet transmission/reception
def onReceive(self, packet, interface):
"""
Run each received packet through Sponge.base.Filters sieve using a new event loop
"""
self.busy = True
asyncio.new_event_loop().run_until_complete(self.filter.sieve(packet))
self.busy = False
self.tcache.refresh()
async def sendAnnounce(self):
async def sendAnnounce(self, dontRespond=False):
"""
Send an announce packet (contains basic network mapping information) every so often so new nodes autoconnect
"""
await self.addPackets(
msgpack.dumps(
{
"onodeID": self.onodeID,
"mnodeID": self.interface.localNode.nodeNum,
}
),
try:
r = AnnounceMessage(
self.onodeID,
None,
True,
None,
packetsClass=0,
0,
self.onodeID,
self.cryptographyInfo,
{"onodeID": self.onodeID, "mnodeID": self.interface.localNode.nodeNum, "dontRespond": dontRespond},
)
await self.sendMessage(r)
except:
logger.log(30, traceback.format_exc())
def onConnection(self, interface, topic=pub.AUTO_TOPIC):
"""
When the node connects start announce loop and end the waiting loop
"""
asyncio.run(self.sendAnnounce())
#asyncio.run(self.sendAnnounce())
self.notConnected = False
def responseCheck(self, packet):
"""
On acknowledgement response, set acks based on the response
TODO: Stop this being sent to sieve
"""
rid = packet["decoded"]["requestId"]
if packet["decoded"]["routing"]["errorReason"] == "MAX_RETRANSMIT":
self.cLog(20, "Got ack error")
logger.debug(f"Got ack error for packet {rid}")
self.acks[str(rid)] = False
else:
self.acks[str(rid)] = True
@ -185,6 +202,7 @@ class Transceiver:
break
return True
# TODO: Deprecate
async def addPackets(
self,
data,
@ -234,12 +252,13 @@ class Transceiver:
if recipientNode == None:
self.send(p)
else:
self.cLog(10, "Sending target: " + str(directID))
logger.log(10, "Sending target: " + str(directID))
if directID != False:
recipientNode = directID
self.send(p, recipientNode=recipientNode)
awaitTask = asyncio.create_task(self.awaitResponse(self.cpid))
await asyncio.sleep(1)
# TODO: pid fix
currentTask = {
"ob": awaitTask,
"pid": str(self.cpid),
@ -248,13 +267,79 @@ class Transceiver:
}
self.tasks[str(self.cpid)] = currentTask
async def sendMessage(self, message: Message):
try:
# self.busy = True
# TODO: Send nonce subMessage after main message
# TODO: Instantiate SubMessages with HeaderPacket of primary message
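# The nonce/tag for the encrypted payload travel in a separate SubMessage whose
# ID is appended to the primary message's header packet; header packets are
# re-sent more times than body packets to improve delivery odds.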
hpacket = message.fullPackets[0]
logger.log(30, message.nonce)
nsm = SubMessage(
hpacket.sender,
hpacket.senderDisplayName,
hpacket.sourceNode,
hpacket.recipient,
hpacket.recipientNode,
self.cryptographyInfo,
hpacket.packetsClass,
hpacket.pAction,
{"nonce": message.nonce, "tag": message.tag},
target=message.target,
primaryMessage=hpacket.packetsID
)
message.fullPackets[0].submessages.append(nsm.packetsID)
message.packets[0] = message.fullPackets[0].dump()
logger.log(30, message.packets[0])
logger.log(30, nsm.packets)
for m in [{"primary": message}, {"nonce": nsm}]:
while self.busy:
await asyncio.sleep(2)
m = [v for v in m.values()][0]
# logger.log(30, m)
await asyncio.sleep(5)
isHead = True
curPacketCount = len(m.packets)
for it, p in enumerate(m.packets):
it += 1
pid = str(uuid.uuid4())
#print(p)
logger.info(f"Packet {it:03d}/{curPacketCount:03d}, Size is: " + str(sys.getsizeof(p)))
doNumber = 3
if isHead:
doNumber = 5
isHead = False
if m.target == False:
logger.log(10, "Sending any")
for i in range(doNumber):
self.send(p)
await asyncio.sleep(1)
else:
logger.log(10, "Sending target: " + str(m.recipientNode))
for i in range(doNumber):
self.send(p, recipientNode=self.network.doLookup(m.recipientNode))
await asyncio.sleep(1)
awaitTask = asyncio.create_task(self.awaitResponse(pid))
currentTask = {
"ob": awaitTask,
"pid": pid,
"packet": p,
"retry": False,
}
self.tasks[pid] = currentTask
await asyncio.sleep(3)
except Exception:
logger.log(30, traceback.format_exc())
self.busy = False
async def progressCheck(self):
"""
Checks if an acknowledgement was received for each packet and resends if not
"""
while True:
await asyncio.sleep(90)
self.cLog(
await asyncio.sleep(480)
logger.log(
20, "Checking progress of {0} tasks".format(len(self.tasks.keys()))
)
doneFlag = True
@ -271,12 +356,12 @@ class Transceiver:
elif retry < 3:
retry += 1
else:
self.cLog(30, "Too many retries")
logger.log(30, "Too many retries")
remove = True
if remove:
del self.tasks[task["pid"]]
else:
self.cLog(20, "Doing retry")
logger.log(20, "Doing retry")
doneFlag = False
# TODO: Resend to specific node
self.send(task["packet"])
@ -299,7 +384,17 @@ class Transceiver:
"""
Announce loop runner
"""
while (self.notConnected or self.busy):
if self.busy:
logger.log(30, "Transceiver is busy, waiting")
await asyncio.sleep(2)
while True:
self.cLog(10, "Announce")
wait = random.randrange(600, 900)
logger.info(f"Waiting {wait} seconds for next announce")
await asyncio.sleep(wait)
while self.busy:
logger.info("Transceiver is busy, waiting")
await asyncio.sleep(2)
await asyncio.sleep(5)
logger.info("Announce")
await self.sendAnnounce()
await asyncio.sleep(180)

2
src/debug Executable file
View File

@ -0,0 +1,2 @@
pylint --errors-only --disable=C,R run.py > tmp.debug
nano tmp.debug

View File

@ -3,44 +3,136 @@ from Sponge.base import Filter
from Siph.map import Network
from Daisy.Catch import Catch
from Daisy.Cache import Cache
from Daisy.Index import Index
from Daisy.CryptographyUtil import SteelPetal
from Splash.serve import Server
from Transceiver.Transceiver import Transceiver
from Cryptography.WhaleSong import DHEFern
from ui import TUI
from Cryptography.WhaleSong import Transport
import Components.hopper as hopper
from Packets.Messages.Protocols.hopper.Response import HopperResponse
from Packets.Messages.Protocols.catch.Response import CatchResponse
from Packets.Messages.Protocols.cryptography.Handshake import Handshake
import tlog
from tlog import VHandler
# Generic imports
import logging
import os
import asyncio
import sys
import time
import datetime
import traceback
import threading
import random
import argparse
import configparser
import shutil
import queue
import json
# Process management library
import psutil
if __name__ == "__main__":
global nodeOb, tuiOb
"""
Global objects for the PierMesh service and the TUI so we can terminate the associated processes later
"""
nodeOb = None
tuiOb = None
# Pull startup parameters
device, webPort, serverInfoFile, delay, nodeNickname = sys.argv[1:]
import uvloop
# Set up file based logging
logPath = "logs"
fileName = datetime.datetime.now().strftime("%m%d%Y_%H%M%S")
logFormat = "%(asctime)s [%(threadName)-12.12s] [%(levelname)-5.5s] %(message)s"
logging.basicConfig(
format=logFormat,
level=logging.INFO,
filename="{0}/{2}_{1}.log".format(logPath, fileName, nodeNickname),
# TODO: Switch config to toml
# TODO: Better protocol management
# TODO: Dry run
if __name__ == "__main__":
"""
Global objects for the PierMesh service and the TUI (so we can terminate the associated processes later), plus configuration objects for the command line and the .piermesh config file
"""
logger = ""
passkey = input("Enter node decryption key: ")
#psk = input("Enter psk: ")
nodeOb = None
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--device", help="Set transceiver device path")
parser.add_argument("-p", "--port", help="Web UI server port")
parser.add_argument("-n", "--nickname", help="Node nickname")
parser.add_argument(
"-s", "--startupDelay", help="Startup delay (useful for testing)"
)
parser.add_argument(
"-o", "--override", help="Whether to override config", default=False
)
parser.add_argument("-x", "--showTUI",
help="Whether to show TUI", default=True)
parser.add_argument(
"-l",
"--confList",
help="Comma separated list of conf files to load for multi-node mode (TODO)",
default=False,
)
parser.add_argument(
"-k",
"--PSK",
help="Pre shared key for initializing communications",
default=False,
)
argConfig = parser.parse_args()
config = configparser.ConfigParser()
if argConfig.confList != False:
pass
if not os.path.exists(".piermesh"):
print("No config at .piermesh, duplicating and loading example config...")
shutil.copyfile(".piermesh.example", ".piermesh")
config.read(".piermesh")
device = ""
if "transceiverPort" in config["OPERATOR_REQUIRED"]:
if argConfig.override:
device = argConfig.device
else:
device = config["OPERATOR_REQUIRED"]["transceiverPort"]
else:
if argConfig.device == False:
print("No device set exiting...")
exit(0)
else:
device = argConfig.device
webPort = config["DEFAULT"]["WebUIPort"]
if argConfig.override:
webPort = argConfig.port
else:
if "WebUIPort" in config["OPERATOR_OVERRIDES"]:
webPort = config["OPERATOR_OVERRIDES"]["WebUIPort"]
webPort = int(webPort)
delay = config["DEFAULT"]["StartupDelay"]
if argConfig.override:
delay = argConfig.startupDelay
else:
if "StartupDelay" in config["OPERATOR_OVERRIDES"]:
delay = config["OPERATOR_OVERRIDES"]["StartupDelay"]
delay = int(delay)
nodeNickname = config["DEFAULT"]["Nickname"]
if argConfig.override:
nodeNickname = argConfig.nickname
else:
if "Nickname" in config["OPERATOR_OVERRIDES"]:
nodeNickname = config["OPERATOR_OVERRIDES"]["Nickname"]
showTUI = config["DEFAULT"]["ShowTUI"]
if argConfig.override:
showTUI = argConfig.showTUI
else:
if "ShowTUI" in config["OPERATOR_OVERRIDES"]:
showTUI = config["OPERATOR_OVERRIDES"]["ShowTUI"]
# configparser values are strings, so bool() alone would always be True; parse explicitly
showTUI = str(showTUI).lower() in ("true", "1", "yes")
psk = False
if argConfig.override:
if argConfig.PSK == False:
print("No PSK set quitting...")
exit(0)
else:
psk = argConfig.PSK
#else:
if "PSK" in config["OPERATOR_REQUIRED"]:
psk = config["OPERATOR_REQUIRED"]["PSK"]
print("Staggering {0} seconds please wait".format(delay))
time.sleep(delay)
class Node:
@ -93,37 +185,80 @@ class Node:
"""
def __init__(self):
self.toLog = []
actionsList = [f for f in dir(self) if "action" in f]
self.actions = {}
for a in actionsList:
self.actions[a.split("_")[1]] = getattr(self, a)
self.todo = []
self.cLog(20, "Past action mapping")
logger.log(20, "Past action mapping")
self.network = Network()
self.catch = Catch(walk=True)
self.cache = Cache(walk=True)
self.daisyCryptography = SteelPetal(passkey)
# TODO: Better directory structure
self.catch = Catch(self.daisyCryptography, walk=True, path=nodeNickname)
self.cache = Cache(self.daisyCryptography, walk=True)
self.cache.get("mlookup").json_to_msg("daisy/mlookup")
logger.info("Protocols:\n" + json.dumps(self.cache.get("mlookup").get(), indent=4))
self.remoteCatchIndex = Index(nodeNickname, self.daisyCryptography)
serverInfoFile = nodeNickname + ".info"
self.nodeInfo = self.cache.get(serverInfoFile)
if self.nodeInfo == False:
self.cache.create(serverInfoFile, {"nodeID": random.randrange(0, 1000000)})
# TODO: Get/set toml
self.cache.create(
serverInfoFile, {"nodeID": random.randrange(0, 1000000)})
self.nodeInfo = self.cache.get(serverInfoFile)
self.network.addin(self.nodeInfo.get()["nodeID"])
self.cLog(20, "Siph network stack initialized")
logger.log(20, "Siph network stack initialized")
self.onodeID = str(self.nodeInfo.get()["nodeID"])
self.server = None
self.sponge = Filter(self.cache, self.onodeID, self.todo, self.cLog)
self.cLog(20, "Filter initialized")
self.cLog(10, "Command line arguments: " + ", ".join(sys.argv))
self.oTransceiver = None
self.cLog(20, "Cryptography initializing")
self.cryptographyInfo = DHEFern(self.cache, nodeNickname, self.cLog)
self.cLog(20, "Cryptography initialized")
logger.log(20, "Cryptography initializing")
self.cryptographyInfo = Transport(
self.cache, nodeNickname, self.daisyCryptography, psk
)
logger.log(20, "Cryptography initialized")
self.sponge = Filter(self.cache, self.onodeID,
self.todo, self.cryptographyInfo)
logger.log(20, "Filter initialized")
# TODO: Run in different thread, dispatch through queue
self.oTransceiver = Transceiver(
device,
self.sponge,
self.onodeID,
self.cache,
self.catch,
self.cryptographyInfo,
self.network
)
self.server = Server(
self.oTransceiver,
self.catch,
self.onodeID,
self.network,
self.cryptographyInfo,
self.remoteCatchIndex,
self.cache,
)
self.processed = []
self.proc = psutil.Process(os.getpid())
self.mTasks = {}
async def main(self):
self.mTasks["list"] = asyncio.create_task(self.spongeListen())
await asyncio.sleep(1)
# self.mTasks["pct"] = asyncio.create_task(self.oTransceiver.progressCheck())
# await asyncio.sleep(1)
self.mTasks["mon"] = asyncio.create_task(self.monitor())
await asyncio.sleep(1)
self.mTasks["announce"] = asyncio.create_task(
self.oTransceiver.announce())
await asyncio.sleep(1)
await self.server.app.start_server(port=int(webPort))
def cLog(self, priority: int, message: str):
logger = logging.getLogger(__name__)
"""
Convenience function that logs to the ui and log files
@ -140,8 +275,18 @@ class Node:
-------
None
"""
logging.log(priority, message)
self.toLog.append("[{0}]:\n{1}".format(datetime.datetime.now(), message))
logger.log(priority, message)
# pass
# logging.log(priority, message)
# self.toLog.append("[{0}]:\n{1}".format(datetime.datetime.now(), message))
async def fsInit(self):
# TODO: Flesh out and properly link everything
if not os.path.exists("data"):
os.makedirs("data")
if not os.path.exists("data/" + nodeNickname):
os.makedirs("data/" + nodeNickname)
async def monitor(self):
global tuiOb
@ -154,8 +299,8 @@ class Node:
memmb = self.proc.memory_info().rss / (1024 * 1024)
memmb = round(memmb, 2)
cpup = self.proc.cpu_percent(interval=1)
self.cLog(
20,
logger.log(
10,
" MEM: {0} mB | CPU: {1} %".format(
memmb,
cpup,
@ -164,7 +309,7 @@ class Node:
tuiOb.do_set_cpu_percent(float(cpup))
tuiOb.do_set_mem(memmb)
else:
self.cLog(20, "No TUI object, waiting 5 seconds...")
logger.log(20, "No TUI object, waiting 5 seconds...")
await asyncio.sleep(5)
async def spongeListen(self):
@ -180,12 +325,21 @@ class Node:
Each queued action is dispatched by looking up its handler in the preloaded actions dictionary and calling it with the action's data
"""
while True:
while len(self.todo) >= 1:
todoNow = self.todo.pop()
action = todoNow["action"]
self.cLog(20, "Action: " + action)
data = todoNow["data"]
# logger.log(10, "Sponge listening...")
# logger.log(10, self.todo)
while (len(self.todo) >= 1):
try:
logger.log(10, "In todo")
todoNow = self.todo[0]
action = todoNow.getAction()
logger.log(20, "Action: " + action)
data = todoNow.getData()
logger.log(20, "Doing action")
await self.actions[action](data)
logger.log(20, "After action")
except Exception:
logger.log(20, traceback.format_exc())
self.todo.pop()
await asyncio.sleep(1)
async def action_sendToPeer(self, data: dict):
@ -205,28 +359,71 @@ class Node:
webui.serve.Server.sendToPeer: Function to actually execute the action
"""
self.server.sendToPeer(data["recipient"], data["res"])
logger.info(data)
await self.server.sendToPeer(data["recipient"], data["data"], data["target"])
async def action_sendCatch(self, data: dict):
"""
Get catch and return the data to a peer
"""
# TODO: Seperators
if "fins" in data:
res = self.catch.get(data["head"], data["body"], fins=data["fins"])
self.server.sendToPeer(data["recipient"], res)
else:
res = self.catch.get(data["head"], data["body"])
logger.info(res)
# TODO: Manually connect two nodes
r = CatchResponse(
self.onodeID,
000000,
self.onodeID,
data["recipient"],
data["recipientNode"],
self.cryptographyInfo,
res,
pskEncrypt=True
)
await self.oTransceiver.sendMessage(r)
async def action_cLog(self, data: dict):
logger.log(data["priority"], data["message"])
async def action_routeCatch(self, data: dict):
await self.server.sendToPeer(data["recipient"], data["html"], data["target"])
async def action_syncIndex(self, data: dict):
for entry in data["index"]:
self.remoteCatchIndex.addEntry(entry)
async def action_map(self, data: dict):
# TODO: Normalize node ids to strings
"""
Map new network data to internal network map
See Also
--------
Siph.network.Network: Layered graph etwork representation
Siph.network.Network: Layered graph network representation
"""
self.network.addLookup(data["onodeID"], data["mnodeID"])
self.cLog(20, "Lookup addition done")
self.network.addon(data["onodeID"])
if self.cryptographyInfo.getRecord("key", data["onodeID"]) == False:
logger.log(20, "In map")
await self.network.addLookup(data["onodeID"], data["mnodeID"])
logger.log(20, "After add lookup")
logger.log(20, "Adding public key")
self.cryptographyInfo.addPublickey(data["onodeID"], data["publicKey"])
logger.log(20, "Public key addition done")
self.network.omap.add_node(data["onodeID"])
logger.log(20, "Added node to outer map")
else:
logger.log(20, "Node already exists skipping addition to map")
if "sessionKey" not in self.cryptographyInfo.getRecord("key", data["onodeID"]).keys():
if not data["dontRespond"]:
#await self.oTransceiver.sendAnnounce(dontRespond=True)
#await asyncio.sleep(180)
h = Handshake(self.nodeInfo.get()["nodeID"], self.nodeInfo.get()["nodeID"], data["onodeID"], data["onodeID"], self.cryptographyInfo, data["onodeID"], self.onodeID)
await self.oTransceiver.sendMessage(h)
async def action_initNodeDH(self, data: dict):
async def action_initCryptography(self, data: dict):
"""
Initialize diffie hellman key exchange
@ -234,101 +431,92 @@ class Node:
--------
Cryptography.DHEFern.DHEFern: End to end encryption functionality
"""
if self.cryptographyInfo.getRecord("key", data["onodeID"]) == False:
await self.oTransceiver.initNodeDH(
self.cryptographyInfo, int(data["mnodeID"]), data["onodeID"]
)
# TODO: Response message
# TODO: Initialize if not initialized
self.cryptographyInfo.sessionSetup(data["yctx"]["sourceNode"]["val"], data["ephemeralKey"])
logger.log(20, "Cryptography handshake complete")
# TODO: Now encrypt all communications node to node with session encryption
# TODO: Expiration?
async def action_keyDeriveDH(self, data: dict):
"""
Derive key via diffie hellman key exchange
"""
async def action_hop(self, data):
try:
self.cryptographyInfo.keyDerive(
data["publicKey"],
self.cryptographyInfo.getSalt(),
data["recipientNode"],
data["params"],
r = None
if data["method"] == "get":
r = hopper.get(
data["url"],
params=data["parameters"],
followTags=["img", "script", "link"],
)
elif data["method"] == "post":
r = hopper.post(data["url"], params=data["parameters"])
if r != None:
r = HopperResponse(
self.onodeID,
000000,
self.onodeID,
data["recipient"],
data["recipientNode"],
r,
self.cryptographyInfo,
)
await self.oTransceiver.sendMessage(r)
except:
self.cLog(30, traceback.format_exc())
logger.log(30, traceback.format_exc())
async def action_routeHop(self, data: dict):
await self.server.sendToPeer(data["recipient"], data["res"], data["target"])
async def logPassLoop():
"""
Loop to pass logs up to the TUI
See Also
--------
ui.TUI: TUI implementation
"""
global tuiOb, nodeOb
while True:
await asyncio.sleep(1)
if tuiOb == None or nodeOb == None:
await asyncio.sleep(1)
elif tuiOb.done:
tuiOb.exit()
os.system("reset")
print("Terminating PierMesh service...")
nodeOb.proc.terminate()
else:
ctoLog = [l for l in nodeOb.toLog]
for l in ctoLog:
tuiOb.do_write_line(l)
nodeOb.toLog.pop()
async def action_addPSK(self, data):
# TODO: Switch to credential
self.cryptographyInfo.createEmpty(data["nodeID"])
self.cryptographyInfo.update(data["nodeID"], {"PSK": data["PSK"]})
async def main():
"""
Main method for running the PierMesh service
Kick-off main method for running the PierMesh service
"""
global nodeOb
try:
nodeOb = Node()
nodeOb.cLog(20, "Starting up")
nodeOb.cLog(20, "Staggering {0} seconds, please wait".format(sys.argv[4]))
time.sleep(int(sys.argv[4]))
nodeOb.oTransceiver = Transceiver(
sys.argv[1],
nodeOb.sponge,
nodeOb.onodeID,
nodeOb.cache,
nodeOb.catch,
nodeOb.cryptographyInfo,
nodeOb.cLog,
)
nodeOb.server = Server(
nodeOb.oTransceiver,
nodeOb.catch,
nodeOb.onodeID,
nodeOb.network,
nodeOb.cLog,
)
nodeOb.mTasks["list"] = asyncio.create_task(nodeOb.spongeListen())
await asyncio.sleep(1)
nodeOb.mTasks["pct"] = asyncio.create_task(nodeOb.oTransceiver.progressCheck())
await asyncio.sleep(1)
nodeOb.mTasks["mon"] = asyncio.create_task(nodeOb.monitor())
await asyncio.sleep(1)
nodeOb.mTasks["announce"] = asyncio.create_task(nodeOb.oTransceiver.announce())
await asyncio.sleep(1)
await nodeOb.server.app.start_server(port=int(sys.argv[2]), debug=True)
except Exception:
logging.log(20, traceback.format_exc())
await nodeOb.main()
except:
logger.log(20, traceback.format_exc())
def initLogger(nodeName, tolog):
global logger
logger = logging.getLogger(__name__)
logger.propagate = False
logger.setLevel(logging.DEBUG)
vh = VHandler(logging.DEBUG, tolog)
formatter = logging.Formatter(
'%(asctime)s - %(name)s - %(levelname)s - %(message)s')
vh.setFormatter(formatter)
logger.addHandler(vh)
dt = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
fh = logging.FileHandler(f"logs/{nodeName}_{dt}.log")
fh.setFormatter(formatter)
logger.addHandler(fh)
if __name__ == "__main__":
try:
mainThread = threading.Thread(target=asyncio.run, args=(main(),))
tolog = queue.Queue()
initLogger(nodeNickname, tolog)
mainThread = threading.Thread(target=uvloop.run, args=(main(),))
mainThread.start()
lplThread = threading.Thread(target=asyncio.run, args=(logPassLoop(),))
lplThread.start()
tuiOb = TUI()
tuiOb.nodeOb = nodeOb
tuiOb.run()
# TODO: Waiting for this to finish when no tui
# TODO: Logging to screen when no tui
if showTUI:
tlog.runLogUI(tolog, nodeNickname)
except:
try:
nodeOb.cLog(30, traceback.format_exc())
except:
logging.log(30, traceback.format_exc())
print(traceback.format_exc())
os._exit(1)

View File

@ -1 +1 @@
python run.py /dev/ttyACM1 5000 server2.info 0 falin
python run.py -d /dev/ttyACM1 -p 5001 -s 75 -n falin -k jgf765!FS0+6 -o True

1
src/setup.fish Normal file
View File

@ -0,0 +1 @@
set TERM xterm-256color

1
src/setup.sh Normal file
View File

@ -0,0 +1 @@
TERM=xterm-256color

296
src/stale/WhaleSong.dhefern.py Executable file
View File

@ -0,0 +1,296 @@
import base64
import os
from cryptography.fernet import Fernet
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import dh
from cryptography.hazmat.primitives.kdf.hkdf import HKDF
from cryptography.hazmat.primitives.serialization import (
Encoding,
NoEncryption,
ParameterFormat,
PublicFormat,
PrivateFormat,
)
import cryptography.hazmat.primitives.serialization as Serialization
import msgpack
from Daisy.Store import Store
# TODO: Different store directories per node
class DHEFern:
"""
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Cryptography/WhaleSong.py>`__
Attributes
----------
cLog
Method reference to `run.Node.cLog` so we can log to the ui from here
loadedParams: dict
In memory representations of cryptography parameters
loadedKeys: dict
In memory representations of cryptography keys
nodeNickname: str
Name of node for isolating configs when running multiple nodes
cache: Components.daisy.Cache
Daisy cache for use in storing cryptography information
publicKey
Public key for node
privateKey
Private key for node
"""
def __init__(self, cache, nodeNickname, cLog):
"""
Parameters
----------
cache: Components.daisy.Cache
Reference to the node instances Daisy cache
nodeNickname: str
Node nickname for record storage
cLog
Reference to `run.Node.cLog`
"""
self.cLog = cLog
self.stores = {}
self.loadedParams = {}
self.loadedKeys = {}
self.nodeNickname = nodeNickname
self.cache = cache
if os.path.exists("daisy/cryptography/{0}/param".format(nodeNickname)) == False:
self.initStore("param")
else:
self.stores["param"] = Store("param", "cryptography", nodeNickname)
self.params = self.loadParamBytes(self.stores["param"].get()["self"])
self.cLog(20, "Param store initialized")
if os.path.exists("daisy/cryptography/{0}/key".format(nodeNickname)) == False:
self.cLog(20, "Key store DNE, initializing")
self.initStore("key")
self.genKeyPair()
else:
self.cLog(20, "Key store exists, loading")
self.stores["key"] = Store("key", "cryptography", nodeNickname)
self.cLog(20, "Store loaded")
# tks = self.stores["key"].get()
# self.publicKey = tks["self"]["publicKey"]
# self.privateKey = tks["self"]["privateKey"]
self.cLog(20, "Key store initialized")
def checkInMem(self, store: str, nodeID: str):
"""
Check if parameters or keys are loaded for node of nodeID
Parameters
----------
store: str
Whether to check loaded keys or parameters
"""
if store == "param":
return nodeID in self.loadedParams.keys()
elif store == "key":
return nodeID in self.loadedKeys.keys()
def loadRecordToMem(self, store: str, nodeID: str):
"""
Load record of nodeID from store to either keys or parameters
"""
r = self.getRecord(store, nodeID)
if r == False:
self.cLog(
30, "Tried to load nonexistent {0} for node {1}".format(store, nodeID)
)
return False
elif self.checkInMem(store, nodeID):
self.cLog(10, "{0}s already deserialized, skipping".format(store))
else:
if store == "param":
self.loadedParams[nodeID] = self.loadParamBytes(r)
elif store == "key":
self.loadedKeys[nodeID] = {
"publicKey": Serialization.load_pem_public_key(r["publicKey"]),
"privateKey": Serialization.load_pem_private_key(
r["privateKey"], None
),
}
return True
def getRecord(self, store: str, key: str):
"""
Get record from store: store with key: key
"""
r = self.stores[store].getRecord(key)
if r == False:
self.cLog(20, "Record does not exist")
return False
else:
return r
def initStore(self, store: str):
"""
Initialize store: store
"""
self.stores[store] = Store(store, "cryptography", self.nodeNickname)
if store == "param":
self.genParams()
self.stores[store].update("self", self.getParamsBytes(), recur=False)
elif store == "key":
self.stores[store].update("self", {}, recur=False)
else:
self.cLog(30, "Store not defined")
def genParams(self):
"""
Generate Diffie Hellman parameters
"""
params = dh.generate_parameters(generator=2, key_size=2048)
self.params = params
return params
def getParamsBytes(self):
"""
Get bytes encoded from self.parameters (TODO: Encode from store)
"""
return self.params.parameter_bytes(Encoding.PEM, ParameterFormat.PKCS3)
def loadParamBytes(self, pemBytes: bytes):
"""
Load parameters to self.params from given bytes (TODO: Load from store)
"""
self.params = Serialization.load_pem_parameters(pemBytes)
return self.params
def genKeyPair(self, paramsOverride=False, setSelf: bool = True):
"""
Generate public and private keys from self.params (TODO: Gen from passed params)
paramsOverride
False or parameters to use (TODO)
setSelf: bool
Whether to set self.privateKey and self.publicKey
"""
privateKey = self.params.generate_private_key()
if setSelf:
self.privateKey = privateKey
publicKey = privateKey.public_key()
if setSelf:
self.publicKey = publicKey
self.stores["key"].update(
"self",
{
"publicKey": self.publicKey.public_bytes(
Encoding.PEM, PublicFormat.SubjectPublicKeyInfo
),
"privateKey": self.privateKey.private_bytes(
Encoding.PEM, PrivateFormat.PKCS8, NoEncryption()
),
},
)
return [privateKey, publicKey]
else:
publicKey = publicKey.public_bytes(
Encoding.PEM, PublicFormat.SubjectPublicKeyInfo
)
privateKey = privateKey.private_bytes(
Encoding.PEM, PrivateFormat.PKCS8, NoEncryption()
)
return [privateKey, publicKey]
def keyDerive(self, pubKey: bytes, salt: bytes, nodeID: str, params: bytes):
"""
Derive shared key using Diffie Hellman
pubKey: bytes
Public key
nodeID: str
PierMesh node ID
params: bytes
Encryption parameters
"""
if self.checkInMem("param", nodeID) == False:
if self.getRecord("param", nodeID) == False:
self.updateStore("param", nodeID, params, recur=False)
self.loadRecordToMem("param", nodeID)
self.cLog(20, "Precheck done for key derivation")
# TODO: Load them and if private key exists load it, otherwise generate a private key
if self.checkInMem("key", nodeID) == False:
if self.getRecord("key", nodeID) == False:
privateKey, publicKey = self.genKeyPair(setSelf=False)
self.updateStore(
"key", nodeID, {"publicKey": publicKey, "privateKey": privateKey}
)
self.loadRecordToMem("key", nodeID)
sharedKey = self.loadedKeys[nodeID]["privateKey"].exchange(
Serialization.load_pem_public_key(pubKey)
)
# Perform key derivation.
self.cLog(20, "Performing key derivation")
derivedKey = HKDF(
algorithm=hashes.SHA256(), length=32, salt=salt, info=b"handshake data"
).derive(sharedKey)
self.cLog(20, "Derived key")
ederivedKey = base64.urlsafe_b64encode(derivedKey)
tr = self.getRecord("key", nodeID)
tr["derivedKey"] = ederivedKey
self.updateStore("key", nodeID, tr)
self.cLog(20, "Done with cryptography store updates")
return ederivedKey
def getSalt(self):
"""
Get random salt
"""
return os.urandom(16)
# TODO: Build in transport security (node/node)
def encrypt(self, data, nodeID: str, isDict: bool = True):
"""
Do Fernet encryption
data
Either bytes or dict to encrypt
isDict: bool
Whether data is a dictionary
"""
r = self.getRecord("key", nodeID)
if r == False:
self.cLog(20, "Node {0} not in keystore".format(nodeID))
return False
else:
derivedKey = r["derivedKey"]
fernet = Fernet(derivedKey)
if isDict:
data = msgpack.dumps(data)
token = fernet.encrypt(data)
return token
def decrypt(self, data, nodeID: str):
"""
Decrypt bytes and return either str or dict (TODO: Check whether to msgpack load)
"""
r = self.getRecord("key", nodeID)
if r == False:
self.cLog(20, "No record of node " + nodeID)
return False
elif not "derivedKey" in r.keys():
self.cLog(20, "No key derived for node " + nodeID)
return False
else:
fernet = Fernet(self.getRecord("key", nodeID)["derivedKey"])
return msgpack.loads(fernet.decrypt(data))

View File

@ -18,7 +18,7 @@ class Router:
self.network = Network()
self.catch = Catch(walk=True)
self.cache = Cache(walk=True)
self.cLog(10, "Loading server info")
logger.log(10, "Loading server info")
self.serverInfo = self.cache.get(nfpath)
if self.serverInfo == False:
self.cache.create(nfpath, {"nodeID": random.randrange(0, 1000000)})

174
src/tlog.py Normal file
View File

@ -0,0 +1,174 @@
import curses
from curses import wrapper
import os
import logging
import queue
import shutil
import time
import psutil
# TODO: Multi node mode
# TODO: Tab per node
class VHandler(logging.Handler):
tolog = queue.Queue()
def __init__(self, level, tolog):
super().__init__(level)
self.tolog = tolog
def emit(self, record):
r = self.format(record)
self.tolog.put([r, record.levelno])
"""
def initLogger(nodeName, tolog):
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
# Is tolog variable creating an error that cant be logged
# and logging is defaulting
# Check if this reference is causing an issue
vh = VHandler(logging.DEBUG, tolog)
formatter = logging.Formatter(
'%(asctime)s - %(name)s - %(levelname)s - %(message)s')
vh.setFormatter(formatter)
logger.addHandler(vh)
dt = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
fh = logging.FileHandler(f"logs/{nodeName}_{dt}.log")
fh.setFormatter(formatter)
logger.addHandler(fh)
return logger
"""
def logUI(stdscr, tolog, nodeNickname):
logcache = []
width, height = shutil.get_terminal_size((49, 27))
if width < 49 or height < 28:
print("Console too small or couldn't retrieve size, please switch to no-TUI mode, exiting...")
exit()
logger = logging.getLogger("__main__." + __name__)
p = psutil.Process(os.getpid())
curses.start_color()
stdscr.keypad(True)
stdscr.nodelay(True)
curses.init_color(9, 200, 200, 200)
grey = 9
curses.init_pair(1, curses.COLOR_WHITE, curses.COLOR_BLACK)
curses.init_pair(2, curses.COLOR_BLACK, curses.COLOR_GREEN)
curses.init_pair(3, curses.COLOR_WHITE, grey)
curses.init_color(curses.COLOR_WHITE, 1000, 1000, 1000)
curses.init_color(curses.COLOR_YELLOW, 1000, 1000, 0)
curses.init_color(curses.COLOR_GREEN, 0, 1000, 0)
curses.init_color(logging.DEBUG, 0, 1000, 0)
curses.init_pair(logging.DEBUG, 0, logging.DEBUG)
curses.init_color(logging.INFO, 1000, 1000, 1000)
curses.init_pair(logging.INFO, 0, logging.INFO)
curses.init_pair(logging.WARNING, 0, curses.COLOR_YELLOW)
curses.init_pair(logging.ERROR, curses.COLOR_WHITE, curses.COLOR_RED)
curses.init_color(logging.CRITICAL, 1000, 0, 0)
curses.init_pair(logging.CRITICAL, curses.COLOR_WHITE, logging.CRITICAL)
icon = []
with open("piermesh-mini.ascii", "r") as f:
icon = f.read().split("\n")
stdscr.bkgd(' ', curses.color_pair(1))
stdscr.clear()
iwin = curses.newwin(13, 50, 25, 0)
iwin.bkgd(" ", curses.color_pair(3))
for it, line in enumerate(icon[:-1]):
iwin.addstr(it+1, 0, " " + line, curses.color_pair(3))
iwin.addstr(10, 2, " Keys: q to abort, ↑ scroll up", curses.color_pair(2))
iwin.addstr(11, 2, " ↓ scroll down ", curses.color_pair(2))
head = curses.newwin(4, 50, 0, 0)
head.bkgd(" ", curses.color_pair(3))
head.addstr(1, 1, "PierMesh TUI", curses.color_pair(3))
head.addstr(2, 1, "Logs:", curses.color_pair(3))
head.border(' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ')
logpad = curses.newpad(1000, 300)
logpad.bkgdset(" ", curses.color_pair(2))
start = 0
gen = 0
lastr = time.time()
while True:
if gen == 0 or ((time.time()-lastr) > 2):
lastr = time.time()
stdscr.addstr(23, 0, " System usage: ", curses.color_pair(3))
mem = round(p.memory_info().rss/(1024*1024), 2)
cpu = p.cpu_percent(interval=0.1)
stdscr.addstr(
24,
0,
f" MEM: {mem} Mb CPU: {cpu}% ",
curses.color_pair(2)
)
if tolog.empty() != True:
while True:
logcache.insert(0, tolog.get())
tolog.task_done()
if tolog.qsize() < 1:
break
if len(logcache) > 100:
logcache = logcache[:100]
logpad.clear()
nextOffset = 0
for it, message in enumerate(logcache):
msg = message[0]
if len(msg) > width:
msgPartA = msg[:width]
msgPartB = msg[width:]
if len(msgPartB) > width:
msgPartB = msgPartB[:width]
logpad.addstr(
it+nextOffset, 0, " " + msgPartA + " ", curses.color_pair(message[1]))
logpad.addstr(
it+nextOffset+1, 0, " " + msgPartB + " ", curses.color_pair(message[1]))
nextOffset = 1
else:
logpad.addstr(
it+nextOffset, 0, " " + msg + " ", curses.color_pair(message[1]))
nextOffset = 0
logpad.refresh(start, 0, 4, 0, 22, width)
stdscr.refresh()
if gen < 1:
iwin.refresh()
head.refresh()
gen += 1
ch = stdscr.getch()
if ch == ord("q"):
curses.nocbreak()
stdscr.keypad(False)
curses.echo()
curses.endwin()
os._exit(1)
elif ch == curses.KEY_UP:
if start != 0:
start -= 1
elif ch == curses.KEY_DOWN:
if start < tolog.qsize():
start += 1
def runLogUI(tolog, nodeNickname):
wrapper(logUI, tolog, nodeNickname)

0
src/tui.py Normal file
View File

View File

@ -1,5 +1,6 @@
# TODO: DEPRECATE
from textual.app import App, ComposeResult
from textual.widgets import Log, Label, Footer, Header, ProgressBar
from textual.widgets import Log, Label, Footer, Header, ProgressBar, Input, Button
from textual.binding import Binding
from textual.containers import Horizontal, Vertical
import sys, os
@ -24,6 +25,7 @@ class TUI(App):
Whether the TUI has been killed
"""
todo = []
visibleLogo = True
nodeOb = None
done = False
@ -70,7 +72,15 @@ class TUI(App):
Load the ascii art for display on the left label
"""
yield Header(icon="P")
yield Label(ascii, classes="largeLabel", name="logo", id="logo")
yield Vertical(
Label(ascii, classes="largeLabel", name="logo", id="logo"),
Label("Add/set pre shared key for node\n"),
Label("Node ID:"),
Input(placeholder="000000", type="integer", max_length=6, name="pskNodeID", id="pskNodeID"),
Label("PSK:"),
Input(type="text", max_length=6, name="psk", id="psk"),
Button("Add/set PSK", name="addPSK", id="addPSK"),
)
yield Vertical(
Log(auto_scroll=True, classes="baseLog"),
Label("CPU usage:", name="cpul", id="cpul"),
@ -79,6 +89,16 @@ class TUI(App):
)
yield Footer()
def on_button_pressed(self, event: Button.Pressed) -> None:
if event.button.id == "addPSK":
self.todo.append({
"action": "addPSK",
"data": {
"nodeID": self.query_one("#pskNodeID").value.zpad(6),
"PSK": self.query_one("#PSK").value
}
})
def do_write_line(self, logLine: str):
"""
Write line to the logs panel
@ -127,3 +147,4 @@ class TUI(App):
if __name__ == "__main__":
app = TUI()
app.run()