Breaking push to show progress

This commit is contained in: parent 632ab76330, commit ba5bdb966b
@@ -42,5 +42,5 @@ This program is free software: you can redistribute it and/or modify it under th
 
 This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
 
-You should have received a copy of the GNU General Public License along with this program. If not, see [https://www.gnu.org/licenses/](https://www.gnu.org/licenses/).
+You should have received a copy of the GNU General Public License along with this program. If not, see https://www.gnu.org/licenses/.
 ```
@@ -13,3 +13,4 @@ sphinx
 textual
 textual-dev
 sphinx-markdown-builder==0.6.6
+pycryptodome
@@ -0,0 +1,13 @@
+# DONT TOUCH THIS SECTION UNLESS YOU KNOW WHAT YOURE DOING
+[DEFAULT]
+Nickname = node00
+StartupDelay = 0
+WebUIPort = 5000
+ShowTUI = True
+
+[OPERATOR_REQUIRED]
+# TransceiverPort = /dev/ttyACM0
+
+# DO YOUR SETTINGS HERE
+[OPERATOR_OVERRIDES]
+
@@ -0,0 +1,13 @@
+# DONT TOUCH THIS SECTION UNLESS YOU KNOW WHAT YOURE DOING
+[DEFAULT]
+Nickname = node00
+StartupDelay = 0
+WebUIPort = 5000
+ShowTUI = True
+
+[OPERATOR_REQUIRED]
+# TransceiverPort = /dev/ttyACM0
+
+# DO YOUR SETTINGS HERE
+[OPERATOR_OVERRIDES]
+
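A minimal sketch (not part of the commit) of reading the config above with Python's standard configparser; it assumes the file is saved as `.piermesh`, the path the updated run.py reads near the end of this diff.

```python
import configparser

# Assumed filename; the updated run.py below calls config.read(".piermesh")
config = configparser.ConfigParser()
config.read(".piermesh")

nickname = config["DEFAULT"]["Nickname"]            # "node00"
web_port = config["DEFAULT"].getint("WebUIPort")    # 5000
show_tui = config["DEFAULT"].getboolean("ShowTUI")  # True

# TransceiverPort is commented out by default, so fall back to None
transceiver = config["OPERATOR_REQUIRED"].get("TransceiverPort", fallback=None)
print(nickname, web_port, show_tui, transceiver)
```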
@@ -1,10 +1,37 @@
+from bs4 import BeautifulSoup
 import requests
 import msgpack
 import lzma
-from Packets.Message import Message
+import base64
+import mimetypes
+
+from Packets.Messages.Protocols.hopper.Response import HopperResponse
 
 
-def get(url: str, params=None):
+def downloadFile(url, text=True, mimeType=None):
+    fbytes = b""
+    with requests.get(url, stream=True) as r:
+        r.raise_for_status()
+        for chunk in r.iter_content(chunk_size=8192):
+            fbytes += chunk
+    if text:
+        return fbytes.decode("utf-8")
+    else:
+        if mimeType == None:
+            mimeType, encoding = mimetypes.guess_type(url)
+            if mimeType == None:
+                raise Error(
+                    "Couldnt guess mime type and none was supplied, cant encode to data url"
+                )
+        b64str = base64.b64encode(fbytes).decode("utf-8")
+        dataUrl = "data:{0};base64,{1}".format(mimeType, b64str)
+        return dataUrl
+
+
+def get(url: str, params=None, followTags=None):
     """
     http/s get request
 
@@ -14,10 +41,38 @@ def get(url: str, params=None):
 
     params
         Requests (library) parameters
 
+    followTags
+        None or list of tags to download the src/href from
     """
     r = requests.get(url, params=params)
-    r = {"response": r.text, "code": r.status_code}
-    return Message(lzma.compress(msgpack.dumps(r))).get()
+    r = {
+        "response": r.text,
+        "code": r.status_code,
+        "content-type": r.headers.get("content-type"),
+    }
+    # TODO: Reject followtags if content type is other then html
+    if followTags != None:
+        soup = BeautifulSoup(r["response"], "html.parser")
+        # TODO: Checking for relative links
+        for tag in followTags:
+            if tag in ["img", "video"]:
+                for elem in soup.find_all(tag):
+                    elem["src"] = downloadFile(elem["src"], text=False)
+            elif tag in ["link"]:
+                for elem in soup.find_all(tag):
+                    if elem["rel"] == "stylesheet":
+                        style = downloadFile(elem["href"])
+                        elem.decompose()
+                        soup.head.append_tag(soup.new_tag("style", string=style))
+            elif tag == "script":
+                for elem in soup.find_all(tag):
+                    script = downloadFile(elem["src"])
+                    elem["src"] = ""
+                    elem.string = script
+        r["response"] = soup.text
 
+    return r
 
 
 def post(url: str, params=None):
@@ -33,4 +88,4 @@ def post(url: str, params=None):
     """
     r = requests.post(url, data=params)
     r = {"response": r.text, "code": r.status_code}
-    return Message(lzma.compress(msgpack.dumps(r))).get()
+    return r
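A hedged usage sketch of the reworked hopper module above (not part of the commit). The URL and tag list are illustrative; the module path Components.hopper matches the import used elsewhere in this commit.

```python
import Components.hopper as hopper

# Plain fetch: returns {"response": ..., "code": ..., "content-type": ...}
page = hopper.get("https://example.com")
print(page["code"], page["content-type"])

# Fetch and process the listed tags: <img>/<video> sources are replaced with
# base64 data URLs via downloadFile(), stylesheets are folded into <style>
# blocks, and <script> sources are inlined before the response is returned.
bundled = hopper.get("https://example.com", followTags=["img", "link", "script"])

# Binary fetch on its own, e.g. for an image, returns a data: URL string
logo = hopper.downloadFile("https://example.com/logo.png", text=False)
```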
@ -1,24 +1,22 @@
|
||||||
import base64
|
import base64
|
||||||
import os
|
import os
|
||||||
from cryptography.fernet import Fernet
|
import lzma
|
||||||
from cryptography.hazmat.primitives import hashes
|
|
||||||
from cryptography.hazmat.primitives.asymmetric import dh
|
|
||||||
from cryptography.hazmat.primitives.kdf.hkdf import HKDF
|
|
||||||
from cryptography.hazmat.primitives.serialization import (
|
|
||||||
Encoding,
|
|
||||||
NoEncryption,
|
|
||||||
ParameterFormat,
|
|
||||||
PublicFormat,
|
|
||||||
PrivateFormat,
|
|
||||||
)
|
|
||||||
import cryptography.hazmat.primitives.serialization as Serialization
|
|
||||||
import msgpack
|
import msgpack
|
||||||
|
|
||||||
|
from Crypto.PublicKey import ECC
|
||||||
|
from Crypto.Hash import SHAKE128
|
||||||
|
from Crypto.Protocol.DH import key_agreement
|
||||||
|
from Crypto.Cipher import AES
|
||||||
|
|
||||||
from Daisy.Store import Store
|
from Daisy.Store import Store
|
||||||
|
|
||||||
# TODO: Different store directories per node
|
# TODO: Different store directories per node
|
||||||
|
# TODO: First time psk transport initiation
|
||||||
|
# Add this credential manually, its picked up and used when the two nodes try to communicate before the session is encrypted
|
||||||
|
|
||||||
|
|
||||||
class DHEFern:
|
class Transport:
|
||||||
"""
|
"""
|
||||||
|
|
||||||
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Cryptography/WhaleSong.py>`__
|
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Cryptography/WhaleSong.py>`__
|
||||||
|
@ -63,16 +61,9 @@ class DHEFern:
|
||||||
"""
|
"""
|
||||||
self.cLog = cLog
|
self.cLog = cLog
|
||||||
self.stores = {}
|
self.stores = {}
|
||||||
self.loadedParams = {}
|
|
||||||
self.loadedKeys = {}
|
self.loadedKeys = {}
|
||||||
self.nodeNickname = nodeNickname
|
self.nodeNickname = nodeNickname
|
||||||
self.cache = cache
|
self.cache = cache
|
||||||
if os.path.exists("daisy/cryptography/{0}/param".format(nodeNickname)) == False:
|
|
||||||
self.initStore("param")
|
|
||||||
else:
|
|
||||||
self.stores["param"] = Store("param", "cryptography", nodeNickname)
|
|
||||||
self.params = self.loadParamBytes(self.stores["param"].get()["self"])
|
|
||||||
self.cLog(20, "Param store initialized")
|
|
||||||
if os.path.exists("daisy/cryptography/{0}/key".format(nodeNickname)) == False:
|
if os.path.exists("daisy/cryptography/{0}/key".format(nodeNickname)) == False:
|
||||||
self.cLog(20, "Key store DNE, initializing")
|
self.cLog(20, "Key store DNE, initializing")
|
||||||
self.initStore("key")
|
self.initStore("key")
|
||||||
|
@ -81,12 +72,12 @@ class DHEFern:
|
||||||
self.cLog(20, "Key store exists, loading")
|
self.cLog(20, "Key store exists, loading")
|
||||||
self.stores["key"] = Store("key", "cryptography", nodeNickname)
|
self.stores["key"] = Store("key", "cryptography", nodeNickname)
|
||||||
self.cLog(20, "Store loaded")
|
self.cLog(20, "Store loaded")
|
||||||
# tks = self.stores["key"].get()
|
|
||||||
# self.publicKey = tks["self"]["publicKey"]
|
|
||||||
# self.privateKey = tks["self"]["privateKey"]
|
|
||||||
self.cLog(20, "Key store initialized")
|
self.cLog(20, "Key store initialized")
|
||||||
|
|
||||||
def checkInMem(self, store: str, nodeID: str):
|
def kdf(self, bytesX):
|
||||||
|
return SHAKE128.new(bytesX).read(32)
|
||||||
|
|
||||||
|
def checkInMem(self, store: str, nodeID: str, checkFieldsExist=[]):
|
||||||
"""
|
"""
|
||||||
Check if parameters or keys are loaded for node of nodeID
|
Check if parameters or keys are loaded for node of nodeID
|
||||||
|
|
||||||
|
@ -99,7 +90,14 @@ class DHEFern:
|
||||||
if store == "param":
|
if store == "param":
|
||||||
return nodeID in self.loadedParams.keys()
|
return nodeID in self.loadedParams.keys()
|
||||||
elif store == "key":
|
elif store == "key":
|
||||||
return nodeID in self.loadedKeys.keys()
|
isExists = nodeID in self.loadedKeys.keys()
|
||||||
|
if isExists:
|
||||||
|
for field in checkFieldsExist:
|
||||||
|
if not (field in self.loadedKeys[nodeID].keys()):
|
||||||
|
if field == "staticKey":
|
||||||
|
self.genStaticKey(nodeID)
|
||||||
|
elif field == "ourEphemeralKey":
|
||||||
|
self.genOurEphemeralKey(nodeID)
|
||||||
|
|
||||||
def loadRecordToMem(self, store: str, nodeID: str):
|
def loadRecordToMem(self, store: str, nodeID: str):
|
||||||
"""
|
"""
|
||||||
|
@ -149,28 +147,7 @@ class DHEFern:
|
||||||
else:
|
else:
|
||||||
self.cLog(30, "Store not defined")
|
self.cLog(30, "Store not defined")
|
||||||
|
|
||||||
def genParams(self):
|
def genStaticKey(self, onodeID, paramsOverride=False):
|
||||||
"""
|
|
||||||
Generate Diffie Hellman parameters
|
|
||||||
"""
|
|
||||||
params = dh.generate_parameters(generator=2, key_size=2048)
|
|
||||||
self.params = params
|
|
||||||
return params
|
|
||||||
|
|
||||||
def getParamsBytes(self):
|
|
||||||
"""
|
|
||||||
Get bytes encoded from self.parameters (TODO: Encode from store)
|
|
||||||
"""
|
|
||||||
return self.params.parameter_bytes(Encoding.PEM, ParameterFormat.PKCS3)
|
|
||||||
|
|
||||||
def loadParamBytes(self, pemBytes: bytes):
|
|
||||||
"""
|
|
||||||
Load parameters to self.params from given bytes (TODO: Load from store)
|
|
||||||
"""
|
|
||||||
self.params = Serialization.load_pem_parameters(pemBytes)
|
|
||||||
return self.params
|
|
||||||
|
|
||||||
def genKeyPair(self, paramsOverride=False, setSelf: bool = True):
|
|
||||||
"""
|
"""
|
||||||
Generate public and private keys from self.params (TODO: Gen from passed params)
|
Generate public and private keys from self.params (TODO: Gen from passed params)
|
||||||
|
|
||||||
|
@ -180,82 +157,51 @@ class DHEFern:
|
||||||
setSelf: bool
|
setSelf: bool
|
||||||
Whether to set self.privateKey and self.publicKey
|
Whether to set self.privateKey and self.publicKey
|
||||||
"""
|
"""
|
||||||
privateKey = self.params.generate_private_key()
|
staticKey = ECC.generate(curve="p256")
|
||||||
if setSelf:
|
self.stores["key"].update(
|
||||||
self.privateKey = privateKey
|
onodeID,
|
||||||
publicKey = privateKey.public_key()
|
{
|
||||||
if setSelf:
|
"staticKey": staticKey.export_key(
|
||||||
self.publicKey = publicKey
|
format="PEM", prot_params={"iteration_count": 131072}
|
||||||
self.stores["key"].update(
|
|
||||||
"self",
|
|
||||||
{
|
|
||||||
"publicKey": self.publicKey.public_bytes(
|
|
||||||
Encoding.PEM, PublicFormat.SubjectPublicKeyInfo
|
|
||||||
),
|
|
||||||
"privateKey": self.privateKey.private_bytes(
|
|
||||||
Encoding.PEM, PrivateFormat.PKCS8, NoEncryption()
|
|
||||||
),
|
|
||||||
},
|
|
||||||
)
|
|
||||||
return [privateKey, publicKey]
|
|
||||||
else:
|
|
||||||
publicKey = publicKey.public_bytes(
|
|
||||||
Encoding.PEM, PublicFormat.SubjectPublicKeyInfo
|
|
||||||
)
|
|
||||||
privateKey = privateKey.private_bytes(
|
|
||||||
Encoding.PEM, PrivateFormat.PKCS8, NoEncryption()
|
|
||||||
)
|
|
||||||
return [privateKey, publicKey]
|
|
||||||
|
|
||||||
def keyDerive(self, pubKey: bytes, salt: bytes, nodeID: str, params: bytes):
|
|
||||||
"""
|
|
||||||
Derive shared key using Diffie Hellman
|
|
||||||
|
|
||||||
pubKey: bytes
|
|
||||||
Public key
|
|
||||||
|
|
||||||
nodeID: str
|
|
||||||
PierMesh node ID
|
|
||||||
|
|
||||||
params: bytes
|
|
||||||
Encryption parameters
|
|
||||||
"""
|
|
||||||
if self.checkInMem("param", nodeID) == False:
|
|
||||||
if self.getRecord("param", nodeID) == False:
|
|
||||||
self.updateStore("param", nodeID, params, recur=False)
|
|
||||||
self.loadRecordToMem("param", nodeID)
|
|
||||||
self.cLog(20, "Precheck done for key derivation")
|
|
||||||
|
|
||||||
# TODO: Load them and if private key exists load it, otherwise generate a private key
|
|
||||||
if self.checkInMem("key", nodeID) == False:
|
|
||||||
if self.getRecord("key", nodeID) == False:
|
|
||||||
privateKey, publicKey = self.genKeyPair(setSelf=False)
|
|
||||||
self.updateStore(
|
|
||||||
"key", nodeID, {"publicKey": publicKey, "privateKey": privateKey}
|
|
||||||
)
|
)
|
||||||
self.loadRecordToMem("key", nodeID)
|
},
|
||||||
|
|
||||||
sharedKey = self.loadedKeys[nodeID]["privateKey"].exchange(
|
|
||||||
Serialization.load_pem_public_key(pubKey)
|
|
||||||
)
|
)
|
||||||
# Perform key derivation.
|
self.loadedKeys[onodeID] = {"staticKey": staticKey}
|
||||||
self.cLog(20, "Performing key derivation")
|
|
||||||
derivedKey = HKDF(
|
|
||||||
algorithm=hashes.SHA256(), length=32, salt=salt, info=b"handshake data"
|
|
||||||
).derive(sharedKey)
|
|
||||||
self.cLog(20, "Derived key")
|
|
||||||
ederivedKey = base64.urlsafe_b64encode(derivedKey)
|
|
||||||
tr = self.getRecord("key", nodeID)
|
|
||||||
tr["derivedKey"] = ederivedKey
|
|
||||||
self.updateStore("key", nodeID, tr)
|
|
||||||
self.cLog(20, "Done with cryptography store updates")
|
|
||||||
return ederivedKey
|
|
||||||
|
|
||||||
def getSalt(self):
|
def genOurEphemeralKey(self, onodeID):
|
||||||
"""
|
ourEphemeralKey = ECC.generate(curve="p256")
|
||||||
Get random salt
|
self.loadedKeys[onodeID]["ourEphemeralKey"] = ourEphemeralKey
|
||||||
"""
|
|
||||||
return os.urandom(16)
|
def addPublickey(self, onodeID, publicKey):
|
||||||
|
self.stores["key"].update(onodeID, {"publicKey": publicKey})
|
||||||
|
self.loadedKeys[onodeID]["publicKey"] = ECC.import_key(publicKey)
|
||||||
|
|
||||||
|
def addPeerEphemeralKey(self, onodeID, peerEphemeralKey):
|
||||||
|
self.loadedKeys[onodeID]["peerEphemeralKey"] = ECC.import_key(peerEphemeralKey)
|
||||||
|
|
||||||
|
def sessionSetup(self, onodeID, publicKey, peerEphemeralKey):
|
||||||
|
# TODO: Deeper checking before loading
|
||||||
|
if self.getRecord("key", onodeID) == False:
|
||||||
|
self.stores["key"].createEmpty(onodeID)
|
||||||
|
self.genStaticKey(onodeID)
|
||||||
|
self.genOurEphemeralKey(onodeID)
|
||||||
|
else:
|
||||||
|
self.loadRecordToMem("key", onodeID)
|
||||||
|
self.addPublickey(onodeID, publicKey)
|
||||||
|
self.addPeerEphemeralKey(onodeID, peerEphemeralKey)
|
||||||
|
self.generateSessionKey(onodeID)
|
||||||
|
|
||||||
|
def generateSessionKey(self, onodeID):
|
||||||
|
keysOb = self.loadedKeys[onodeID]
|
||||||
|
sessionKey = key_agreement(
|
||||||
|
static_priv=keysOb["staticKey"],
|
||||||
|
static_pub=keysOb["publicKey"],
|
||||||
|
eph_priv=keysOb["ourEphemeralKey"],
|
||||||
|
eph_pub=keysOb["peerEphemeralKey"],
|
||||||
|
kdf=self.kdf,
|
||||||
|
)
|
||||||
|
self.loadedKeys[onodeID]["sessionKey"] = sessionKey
|
||||||
|
return sessionKey
|
||||||
|
|
||||||
# TODO: Build in transport security (node/node)
|
# TODO: Build in transport security (node/node)
|
||||||
def encrypt(self, data, nodeID: str, isDict: bool = True):
|
def encrypt(self, data, nodeID: str, isDict: bool = True):
|
||||||
|
@ -268,29 +214,53 @@ class DHEFern:
|
||||||
isDict: bool
|
isDict: bool
|
||||||
Whether data is a dictionary
|
Whether data is a dictionary
|
||||||
"""
|
"""
|
||||||
r = self.getRecord("key", nodeID)
|
if (nodeID in self.loadedKeys.keys()) == False:
|
||||||
if r == False:
|
self.cLog(20, "Node {0} not in keychain".format(nodeID))
|
||||||
self.cLog(20, "Node {0} not in keystore".format(nodeID))
|
|
||||||
return False
|
return False
|
||||||
else:
|
else:
|
||||||
derivedKey = r["derivedKey"]
|
r = self.loadedKeys[nodeID]
|
||||||
fernet = Fernet(derivedKey)
|
if "sessionKey" in r.keys():
|
||||||
if isDict:
|
sessionKey = r["sessionKey"]
|
||||||
data = msgpack.dumps(data)
|
cipher = AES.new(sessionKey, AES.MODE_GCM)
|
||||||
token = fernet.encrypt(data)
|
nonce = cipher.nonce
|
||||||
return token
|
if isDict:
|
||||||
|
data = msgpack.dumps(data)
|
||||||
|
ciphertext, tag = cipher.encrypt_and_digest(data)
|
||||||
|
return (nonce, ciphertext, tag)
|
||||||
|
elif "PSK" in r.keys():
|
||||||
|
cipher = AES.new(r["PSK"], AES.MODE_GCM)
|
||||||
|
nonce = cipher.nonce
|
||||||
|
if isDict:
|
||||||
|
data = msgpack.dumps(data)
|
||||||
|
ciphertext, tag = cipher.encrypt_and_digest(data)
|
||||||
|
return (nonce, ciphertext, tag)
|
||||||
|
else:
|
||||||
|
self.cLog(20, "Node {0} does not have session key".format(nodeID))
|
||||||
|
|
||||||
def decrypt(self, data, nodeID: str):
|
def decrypt(self, data, nodeID: str, nonce, tag):
|
||||||
"""
|
"""
|
||||||
Decrypt bytes and return either str or dict (TODO: Check whether to msgpack load)
|
Decrypt bytes and return either str or dict (TODO: Check whether to msgpack load)
|
||||||
"""
|
"""
|
||||||
r = self.getRecord("key", nodeID)
|
if (nodeID in self.loadedKeys.keys()) == False:
|
||||||
if r == False:
|
self.cLog(20, "Node {0} not in keychain".format(nodeID))
|
||||||
self.cLog(20, "No record of node " + nodeID)
|
|
||||||
return False
|
|
||||||
elif not "derivedKey" in r.keys():
|
|
||||||
self.cLog(20, "No key derived for node " + nodeID)
|
|
||||||
return False
|
return False
|
||||||
else:
|
else:
|
||||||
fernet = Fernet(self.getRecord("key", nodeID)["derivedKey"])
|
r = self.loadedKeys[nodeID]
|
||||||
return msgpack.loads(fernet.decrypt(data))
|
if "sessionKey" in r.keys():
|
||||||
|
sessionKey = r["sessionKey"]
|
||||||
|
cipher = AES.new(sessionKey, AES.MODE_GCM, nonce=nonce)
|
||||||
|
|
||||||
|
data = cipher.decrypt(data)
|
||||||
|
data = msgpack.loads(lzma.decompress(data))
|
||||||
|
|
||||||
|
return data
|
||||||
|
elif "PSK" in r.keys():
|
||||||
|
cipher = AES.new(r["PSK"], AES.MODE_GCM, nonce=nonce)
|
||||||
|
|
||||||
|
data = cipher.decrypt(data)
|
||||||
|
data = msgpack.loads(lzma.decompress(data))
|
||||||
|
|
||||||
|
return data
|
||||||
|
else:
|
||||||
|
self.cLog(20, "Node {0} does not have session key".format(nodeID))
|
||||||
|
return False
|
||||||
|
|
|
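The hunks above replace the old Fernet/Diffie-Hellman class DHEFern with a pycryptodome-based Transport class: per-peer static and ephemeral P-256 keys, SHAKE128 as the KDF, and AES-GCM for payloads. A self-contained sketch of that key-agreement flow (illustrative names, not the class's own attributes):

```python
from Crypto.PublicKey import ECC
from Crypto.Hash import SHAKE128
from Crypto.Protocol.DH import key_agreement
from Crypto.Cipher import AES

def kdf(shared_secret: bytes) -> bytes:
    # Same derivation the class uses: 32 bytes squeezed out of SHAKE128
    return SHAKE128.new(shared_secret).read(32)

# Each side holds a long-term (static) key and a per-session (ephemeral) key
a_static, a_eph = ECC.generate(curve="p256"), ECC.generate(curve="p256")
b_static, b_eph = ECC.generate(curve="p256"), ECC.generate(curve="p256")

# Both sides derive the same session key from their own private keys and the
# other side's public keys
key_a = key_agreement(static_priv=a_static, static_pub=b_static.public_key(),
                      eph_priv=a_eph, eph_pub=b_eph.public_key(), kdf=kdf)
key_b = key_agreement(static_priv=b_static, static_pub=a_static.public_key(),
                      eph_priv=b_eph, eph_pub=a_eph.public_key(), kdf=kdf)
assert key_a == key_b

# The session key is then used with AES-GCM, as in Transport.encrypt()
cipher = AES.new(key_a, AES.MODE_GCM)
ciphertext, tag = cipher.encrypt_and_digest(b"hello piermesh")
```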
@ -62,7 +62,7 @@ class Cache:
|
||||||
tpath = root + "/" + p
|
tpath = root + "/" + p
|
||||||
self.data[tpath] = Daisy(tpath)
|
self.data[tpath] = Daisy(tpath)
|
||||||
|
|
||||||
def create(self, path: str, data: dict):
|
def create(self, path: str, data: dict, remote=False):
|
||||||
"""
|
"""
|
||||||
Create new record
|
Create new record
|
||||||
|
|
||||||
|
@ -74,12 +74,16 @@ class Cache:
|
||||||
data: dict
|
data: dict
|
||||||
Data to populate record with
|
Data to populate record with
|
||||||
"""
|
"""
|
||||||
with open(self.path + "/" + path, "wb") as f:
|
if remote == False:
|
||||||
f.write(msgpack.dumps(data))
|
with open(self.path + "/" + path, "wb") as f:
|
||||||
# logging.log(10, "Done creating record")
|
f.write(msgpack.dumps(data))
|
||||||
self.data[path] = Daisy(self.path + "/" + path)
|
# logging.log(10, "Done creating record")
|
||||||
# logging.log(10, "Done loading to Daisy")
|
self.data[path] = Daisy(self.path + "/" + path)
|
||||||
return self.data[path]
|
# logging.log(10, "Done loading to Daisy")
|
||||||
|
return self.data[path]
|
||||||
|
else:
|
||||||
|
self.data[path] = Ref(path, remote)
|
||||||
|
return self.data[path]
|
||||||
|
|
||||||
def get(self, path: str):
|
def get(self, path: str):
|
||||||
"""
|
"""
|
||||||
|
|
|
@ -1,7 +1,9 @@
|
||||||
from Daisy.Cache import Cache
|
from Daisy.Cache import Cache
|
||||||
|
from Daisy.Ref import Ref
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import random
|
import random
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
|
||||||
class Catch(Cache):
|
class Catch(Cache):
|
||||||
|
@ -34,7 +36,7 @@ class Catch(Cache):
|
||||||
return super().get(path)
|
return super().get(path)
|
||||||
|
|
||||||
# TODO: Rename
|
# TODO: Rename
|
||||||
def get(self, head: str, tail: str, fins=None):
|
def get(self, head: str, body: str, fins=None):
|
||||||
"""
|
"""
|
||||||
Get catch by pieces
|
Get catch by pieces
|
||||||
|
|
||||||
|
@ -49,10 +51,14 @@ class Catch(Cache):
|
||||||
fins
|
fins
|
||||||
List of (maximum 8 characters) strings at the end of the catch oe None if none
|
List of (maximum 8 characters) strings at the end of the catch oe None if none
|
||||||
"""
|
"""
|
||||||
r = self.search({"head": head, "tail": tail})
|
r = ""
|
||||||
|
if fins != None and fins != "":
|
||||||
|
r = self.search({"head": head, "body": body, "fins": fins})
|
||||||
|
else:
|
||||||
|
r = self.search({"head": head, "body": body})
|
||||||
return r[0][1]["html"]
|
return r[0][1]["html"]
|
||||||
|
|
||||||
def addc(self, peer, node, seperator, head, tail, data, fins=None):
|
def addc(self, peer, node, seperator, head, body, data, fins=None, remote=False):
|
||||||
tnpath = "catch/" + node
|
tnpath = "catch/" + node
|
||||||
if os.path.exists(tnpath) != True:
|
if os.path.exists(tnpath) != True:
|
||||||
os.makedirs(tnpath)
|
os.makedirs(tnpath)
|
||||||
|
@ -62,8 +68,20 @@ class Catch(Cache):
|
||||||
sid = str(random.randrange(0, 999999)).zfill(6)
|
sid = str(random.randrange(0, 999999)).zfill(6)
|
||||||
data["seperator"] = seperator
|
data["seperator"] = seperator
|
||||||
data["head"] = head
|
data["head"] = head
|
||||||
data["tail"] = tail
|
data["body"] = body
|
||||||
if fins != None:
|
if fins != None:
|
||||||
data["fins"] = fins
|
data["fins"] = fins
|
||||||
res = self.create("{0}/{1}/{2}".format(node, peer, sid), data)
|
res = self.create("{0}/{1}/{2}".format(node, peer, sid), data, remote=remote)
|
||||||
return [sid, res]
|
return [sid, res]
|
||||||
|
|
||||||
|
def genIndex(self, onodeID):
|
||||||
|
dirList = []
|
||||||
|
for k in self.data.keys():
|
||||||
|
curCatch = {"path": k, "resNodeID": onodeID, "sid": str(uuid.uuid4())}
|
||||||
|
curCatch += self.data[k]
|
||||||
|
del curCatch["data"]
|
||||||
|
dirList.append(curCatch)
|
||||||
|
return dirList
|
||||||
|
|
||||||
|
def mergeIndex(self, remoteIndex):
|
||||||
|
self.remoteCatchesMap += remoteIndex
|
||||||
|
|
|
@@ -0,0 +1,10 @@
+from Daisy.Daisy import Daisy
+
+
+class Credential(Daisy):
+    def __init__(self, nodeNickname, credentialName, extension, daisyCryptography):
+        fname = "data/{0}/{1}.{2}".format(nodeNickname, credentialName, extension)
+        super().__init__(
+            fname,
+            daisyCryptography,
+        )
@@ -0,0 +1,33 @@
+from Crypto.Cipher import AES
+import traceback
+
+
+class SteelPetal:
+    def __init__(self, key, cLog, nonce=None, testData=None):
+        self.cLog = cLog
+        if nonce == None:
+            self.cipher = AES.new(key, AES.MODE_GCM)
+            self.nonce = self.cipher.nonce
+        else:
+            self.cipher = AES.new(key, AES.MODE_GCM, nonce=nonce)
+            self.nonce = nonce
+        if testData != None:
+            try:
+                self.cipher.decrypt(testData)
+            except:
+                self.cLog(20, traceback.format_exc())
+                return False
+
+    def encrypt(self, data):
+        try:
+            return self.cipher.encrypt_and_digest(data)
+        except:
+            self.cLog(20, traceback.format_exc())
+            return False
+
+    def decrypt(self, data):
+        try:
+            return self.cipher.decrypt(data)
+        except:
+            self.cLog(20, traceback.format_exc())
+            return False
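A short round-trip sketch of the AES-GCM pattern SteelPetal wraps, using pycryptodome directly (not part of the commit; the random 16-byte key is illustrative). Unlike SteelPetal.decrypt above, it calls decrypt_and_verify so the authentication tag is actually checked.

```python
from Crypto.Cipher import AES
from Crypto.Random import get_random_bytes

key = get_random_bytes(16)

# Encrypt side: a fresh cipher picks its own nonce
enc = AES.new(key, AES.MODE_GCM)
ciphertext, tag = enc.encrypt_and_digest(b"daisy record bytes")

# Decrypt side: rebuild the cipher from the same key and nonce
dec = AES.new(key, AES.MODE_GCM, nonce=enc.nonce)
plaintext = dec.decrypt_and_verify(ciphertext, tag)
assert plaintext == b"daisy record bytes"
```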
@ -40,12 +40,15 @@ class Daisy:
|
||||||
In memory representation
|
In memory representation
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
# TODO: Strong encrypt
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
filepath: str,
|
filepath: str,
|
||||||
|
daisyCryptography,
|
||||||
templates: dict = {},
|
templates: dict = {},
|
||||||
template: bool = False,
|
template: bool = False,
|
||||||
prefillDict: bool = False,
|
prefillDict: bool = False,
|
||||||
|
remote=False,
|
||||||
):
|
):
|
||||||
"""
|
"""
|
||||||
Parameters
|
Parameters
|
||||||
|
@ -62,27 +65,32 @@ class Daisy:
|
||||||
prefillDict: bool
|
prefillDict: bool
|
||||||
Whether to fill the record with a template
|
Whether to fill the record with a template
|
||||||
"""
|
"""
|
||||||
|
self.remote = False
|
||||||
self.filepath = filepath
|
self.filepath = filepath
|
||||||
if os.path.exists(filepath) != True:
|
if remote != False:
|
||||||
with open(filepath, "wb") as f:
|
self.remote = True
|
||||||
if template != False:
|
self.remoteNodeID = remote
|
||||||
if template in templates.keys():
|
|
||||||
t = templates[template].get()
|
|
||||||
if prefillDict != False:
|
|
||||||
for k in prefillDict.keys():
|
|
||||||
t[k] = prefillDict[k]
|
|
||||||
f.write(msgpack.dumps(t))
|
|
||||||
self.msg = t
|
|
||||||
else:
|
|
||||||
print("No such template as: " + template)
|
|
||||||
else:
|
|
||||||
f.write(msgpack.dumps({}))
|
|
||||||
self.msg = {}
|
|
||||||
elif os.path.isdir(filepath):
|
|
||||||
self.msg = "directory"
|
|
||||||
else:
|
else:
|
||||||
with open(filepath, "rb") as f:
|
if os.path.exists(filepath) != True:
|
||||||
self.msg = msgpack.loads(f.read())
|
with open(filepath, "wb") as f:
|
||||||
|
if template != False:
|
||||||
|
if template in templates.keys():
|
||||||
|
t = templates[template].get()
|
||||||
|
if prefillDict != False:
|
||||||
|
for k in prefillDict.keys():
|
||||||
|
t[k] = prefillDict[k]
|
||||||
|
f.write(msgpack.dumps(t))
|
||||||
|
self.msg = t
|
||||||
|
else:
|
||||||
|
print("No such template as: " + template)
|
||||||
|
else:
|
||||||
|
f.write(msgpack.dumps({}))
|
||||||
|
self.msg = {}
|
||||||
|
elif os.path.isdir(filepath):
|
||||||
|
self.msg = "directory"
|
||||||
|
else:
|
||||||
|
with open(filepath, "rb") as f:
|
||||||
|
self.msg = msgpack.loads(f.read())
|
||||||
|
|
||||||
# Use override for updating
|
# Use override for updating
|
||||||
|
|
||||||
|
|
|
@@ -0,0 +1,52 @@
+from Daisy.Daisy import Daisy
+
+
+class Index(Daisy):
+    def __init__(self, nodeNickname, prefill=[], indexedFields=[], autoIndex=True):
+        if autoIndex:
+            if prefill != []:
+                if indexedFields == []:
+                    for i in prefill:
+                        # TODO: Value type annotation
+                        # TODO: Value weighting
+                        for k, v in i.items():
+                            indexedFields.append(k)
+                    indexedFields = list(set(indexedFields))
+        super().__init__(
+            nodeNickname + ".index",
+            prefillDict={"_index": prefill, "_fields": indexedFields},
+        )
+
+    def addEntry(self, entry):
+        self.write(override=entry)
+
+    def search(self, keydict: dict, strict: bool = True):
+        """
+        Search cache for record for records with values
+
+        keydict: dict
+            Values to search for
+
+        strict: bool
+            Whether to require values match
+        """
+        results = []
+        for key, val in self.data["_index"].items():
+            val = val.get()
+            if strict and type(val) != str:
+                addcheck = False
+                for k, v in keydict.items():
+                    if k in val.keys():
+                        if v in val[k]:
+                            addcheck = True
+                        else:
+                            addcheck = False
+                            break
+                if addcheck:
+                    results.append([key, val])
+            elif type(val) != str:
+                for k, v in keydict.items():
+                    if k in val.keys():
+                        if v in val[k]:
+                            results.append([key, val])
+        return results
@@ -0,0 +1,6 @@
+from Daisy.Daisy import Daisy
+
+
+class Ref(Daisy):
+    def __init__(self, path, remoteNodeID):
+        super().__init__(path, remote=remoteNodeID)
@ -17,6 +17,9 @@ class Store(Daisy):
|
||||||
os.mkdir(fpath)
|
os.mkdir(fpath)
|
||||||
super().__init__("daisy/" + cpath)
|
super().__init__("daisy/" + cpath)
|
||||||
|
|
||||||
|
def createEmpty(self, key):
|
||||||
|
self.msg[key] = {}
|
||||||
|
|
||||||
def update(self, entry: str, data, recur: bool = True):
|
def update(self, entry: str, data, recur: bool = True):
|
||||||
if recur:
|
if recur:
|
||||||
for key in data.keys():
|
for key in data.keys():
|
||||||
|
|
|
@ -39,8 +39,10 @@ class Header(Packet):
|
||||||
packetCount: int,
|
packetCount: int,
|
||||||
sender: int,
|
sender: int,
|
||||||
senderDisplayName: int,
|
senderDisplayName: int,
|
||||||
|
sourceNode: int,
|
||||||
recipient: int,
|
recipient: int,
|
||||||
recipientNode: int,
|
recipientNode: int,
|
||||||
|
nonce,
|
||||||
subpacket: bool = False,
|
subpacket: bool = False,
|
||||||
wantFullResponse: bool = False,
|
wantFullResponse: bool = False,
|
||||||
packetsClass: int = 0,
|
packetsClass: int = 0,
|
||||||
|
@ -56,6 +58,8 @@ class Header(Packet):
|
||||||
self.subpacket = subpacket
|
self.subpacket = subpacket
|
||||||
self.wantFullResponse = wantFullResponse
|
self.wantFullResponse = wantFullResponse
|
||||||
self.pAction = pAction
|
self.pAction = pAction
|
||||||
|
self.sourceNode = sourceNode
|
||||||
|
self.nonce = nonce
|
||||||
|
|
||||||
def usePreset(self, path: str):
|
def usePreset(self, path: str):
|
||||||
"""
|
"""
|
||||||
|
@ -72,11 +76,13 @@ class Header(Packet):
|
||||||
res = msgpack.loads(super().dump())
|
res = msgpack.loads(super().dump())
|
||||||
res["sender"] = self.sender
|
res["sender"] = self.sender
|
||||||
res["senderDisplayName"] = self.senderDisplayName
|
res["senderDisplayName"] = self.senderDisplayName
|
||||||
|
res["sourceNode"] = self.sourceNode
|
||||||
res["recipient"] = self.recipient
|
res["recipient"] = self.recipient
|
||||||
res["recipientNode"] = self.recipientNode
|
res["recipientNode"] = self.recipientNode
|
||||||
res["subpacket"] = self.subpacket
|
res["subpacket"] = self.subpacket
|
||||||
res["wantFullResponse"] = self.wantFullResponse
|
res["wantFullResponse"] = self.wantFullResponse
|
||||||
res["packetsClass"] = self.packetsClass
|
res["packetsClass"] = self.packetsClass
|
||||||
res["pAction"] = self.pAction
|
res["pAction"] = self.pAction
|
||||||
|
res["nonce"] = self.nonce
|
||||||
|
|
||||||
return msgpack.dumps(res)
|
return msgpack.dumps(res)
|
||||||
|
|
|
@ -8,6 +8,10 @@ import math
|
||||||
# DO NOT CHANGE DATA SIZE UNLESS YOU KNOW WHAT YOURE DOING
|
# DO NOT CHANGE DATA SIZE UNLESS YOU KNOW WHAT YOURE DOING
|
||||||
|
|
||||||
|
|
||||||
|
def dict2bytes(cdict: dict):
|
||||||
|
return lzma.compress(msgpack.dumps(cdict))
|
||||||
|
|
||||||
|
|
||||||
class Message:
|
class Message:
|
||||||
"""
|
"""
|
||||||
Full message which is composed of `Packets.Packet.Packet`s
|
Full message which is composed of `Packets.Packet.Packet`s
|
||||||
|
@ -25,13 +29,16 @@ class Message:
|
||||||
bytesObject: bytes,
|
bytesObject: bytes,
|
||||||
sender: int,
|
sender: int,
|
||||||
senderDisplayName: int,
|
senderDisplayName: int,
|
||||||
|
sourceNode,
|
||||||
recipient: int,
|
recipient: int,
|
||||||
recipientNode: int,
|
recipientNode: int,
|
||||||
cryptographyInfo,
|
cryptographyInfo,
|
||||||
|
packetsClass,
|
||||||
|
pAction,
|
||||||
dataSize: int = 128,
|
dataSize: int = 128,
|
||||||
wantFullResponse: bool = False,
|
wantFullResponse: bool = False,
|
||||||
packetsClass: int = 0,
|
|
||||||
):
|
):
|
||||||
|
# TODO: PSK for usage prior to credentials
|
||||||
"""
|
"""
|
||||||
Parameters
|
Parameters
|
||||||
----------
|
----------
|
||||||
|
@ -75,7 +82,9 @@ class Message:
|
||||||
# Data passed in by peers should already have been e2ee encrypted by SubtleCrypto
|
# Data passed in by peers should already have been e2ee encrypted by SubtleCrypto
|
||||||
# Transport encryption
|
# Transport encryption
|
||||||
# bytesObject = lzma.compress(bytesObject, str(recipientNode).zfill(6), isDict=False)
|
# bytesObject = lzma.compress(bytesObject, str(recipientNode).zfill(6), isDict=False)
|
||||||
bytesObject = cryptographyInfo.encrypt(bytesObject, self.no)
|
bytesObject, nonce, tag = cryptographyInfo.encrypt(
|
||||||
|
bytesObject, str(recipientNode).zfill(6), isDict=False
|
||||||
|
)
|
||||||
packets = []
|
packets = []
|
||||||
self.packetsID = random.randrange(0, 999999)
|
self.packetsID = random.randrange(0, 999999)
|
||||||
pnum = 1
|
pnum = 1
|
||||||
|
@ -98,10 +107,13 @@ class Message:
|
||||||
pnum,
|
pnum,
|
||||||
sender,
|
sender,
|
||||||
senderDisplayName,
|
senderDisplayName,
|
||||||
|
sourceNode,
|
||||||
recipient,
|
recipient,
|
||||||
recipientNode,
|
recipientNode,
|
||||||
|
nonce,
|
||||||
wantFullResponse=wantFullResponse,
|
wantFullResponse=wantFullResponse,
|
||||||
packetsClass=packetsClass,
|
packetsClass=packetsClass,
|
||||||
|
pAction=pAction,
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
for it in range(pnum):
|
for it in range(pnum):
|
||||||
|
@ -118,12 +130,18 @@ class Message:
|
||||||
"""
|
"""
|
||||||
return self.packets
|
return self.packets
|
||||||
|
|
||||||
def reassemble(self, completedMessage: dict):
|
def reassemble(self, completedMessage: dict, cryptographyInfo):
|
||||||
"""
|
"""
|
||||||
Reassemble packets from a completed message in `Sponge.base`
|
Reassemble packets from a completed message in `Sponge.base`
|
||||||
"""
|
"""
|
||||||
data = b""
|
data = b""
|
||||||
for it in range(1, int(completedMessage["packetCount"])):
|
for it in range(1, int(completedMessage["packetCount"])):
|
||||||
data += completedMessage["data"][completedMessage["dataOrder"].index(it)]
|
data += completedMessage["data"][completedMessage["dataOrder"].index(it)]
|
||||||
res = msgpack.loads(lzma.decompress(data))
|
res = msgpack.loads(
|
||||||
|
lzma.decompress(
|
||||||
|
cryptographyInfo.decrypt(
|
||||||
|
data, completedMessage["sourceNode"], completedMessage["nonce"]
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
return res
|
return res
|
||||||
|
|
|
@ -0,0 +1,27 @@
|
||||||
|
from Packets.Message import Message
|
||||||
|
import Packets.Message
|
||||||
|
|
||||||
|
|
||||||
|
class IndexSync(Message):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
sender,
|
||||||
|
senderID,
|
||||||
|
sourceNode,
|
||||||
|
recipient,
|
||||||
|
recipientNode,
|
||||||
|
cryptographyInfo,
|
||||||
|
index,
|
||||||
|
):
|
||||||
|
bytesOb = Packets.Message.dict2bytes({"index": index})
|
||||||
|
super().__init__(
|
||||||
|
bytesOb,
|
||||||
|
sender,
|
||||||
|
senderID,
|
||||||
|
sourceNode,
|
||||||
|
recipient,
|
||||||
|
recipientNode,
|
||||||
|
cryptographyInfo,
|
||||||
|
4,
|
||||||
|
2,
|
||||||
|
)
|
|
@ -0,0 +1,29 @@
|
||||||
|
from Packets.Message import Message
|
||||||
|
import Packets.Message
|
||||||
|
|
||||||
|
|
||||||
|
class CatchRequest(Message):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
sender,
|
||||||
|
senderID,
|
||||||
|
sourceNode,
|
||||||
|
recipient,
|
||||||
|
recipientNode,
|
||||||
|
cryptographyInfo,
|
||||||
|
head,
|
||||||
|
body,
|
||||||
|
fins,
|
||||||
|
):
|
||||||
|
bytesOb = Packets.Message.dict2bytes({"head": head, "body": body, "fins": fins})
|
||||||
|
super().__init__(
|
||||||
|
bytesOb,
|
||||||
|
sender,
|
||||||
|
senderID,
|
||||||
|
sourceNode,
|
||||||
|
recipient,
|
||||||
|
recipientNode,
|
||||||
|
cryptographyInfo,
|
||||||
|
1,
|
||||||
|
0,
|
||||||
|
)
|
|
@ -0,0 +1,27 @@
|
||||||
|
from Packets.Message import Message
|
||||||
|
import Packets.Message
|
||||||
|
|
||||||
|
|
||||||
|
class CatchResponse(Message):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
sender,
|
||||||
|
senderID,
|
||||||
|
sourceNode,
|
||||||
|
recipient,
|
||||||
|
recipientNode,
|
||||||
|
cryptographyInfo,
|
||||||
|
html,
|
||||||
|
):
|
||||||
|
bytesOb = Packets.Message.dict2bytes({"html": html})
|
||||||
|
super().__init__(
|
||||||
|
bytesOb,
|
||||||
|
sender,
|
||||||
|
senderID,
|
||||||
|
sourceNode,
|
||||||
|
recipient,
|
||||||
|
recipientNode,
|
||||||
|
cryptographyInfo,
|
||||||
|
1,
|
||||||
|
1,
|
||||||
|
)
|
|
@ -0,0 +1,34 @@
|
||||||
|
from Packets.Message import Message
|
||||||
|
import Packets.Message
|
||||||
|
|
||||||
|
|
||||||
|
class Handshake(Message):
|
||||||
|
def __init__(
|
||||||
|
self, sender, senderID, recipient, recipientNode, cryptographyInfo, onodeID
|
||||||
|
):
|
||||||
|
publicKey = None
|
||||||
|
ephemeralKey = None
|
||||||
|
if onodeID in cryptographyInfo.loadedKeys.keys():
|
||||||
|
if "staticKey" in cryptographyInfo.loadedKeys[onodeID].keys():
|
||||||
|
publicKey = cryptographyInfo.loadedKeys[onodeID]["staticKey"]
|
||||||
|
else:
|
||||||
|
cryptographyInfo.genStaticKey(onodeID)
|
||||||
|
publicKey = cryptographyInfo.loadedKeys[onodeID]["staticKey"]
|
||||||
|
if "ourEphemeralKey" in cryptographyInfo.loadedKeys[onodeID].keys():
|
||||||
|
ephemeralKey = cryptographyInfo.loadedKeys[onodeID]["ourEphemeralKey"]
|
||||||
|
else:
|
||||||
|
cryptographyInfo.genOurEphemeralKey(onodeID)
|
||||||
|
ephemeralKey = cryptographyInfo.loadedKeys[onodeID]["ourEphemeralKey"]
|
||||||
|
|
||||||
|
bytesOb = Packets.Message.dict2bytes(
|
||||||
|
{"publicKey": publicKey, "ephemeralKey": ephemeralKey}
|
||||||
|
)
|
||||||
|
super().__init__(
|
||||||
|
bytesOb,
|
||||||
|
sender,
|
||||||
|
senderID,
|
||||||
|
recipient,
|
||||||
|
recipientNode,
|
||||||
|
cryptographyInfo,
|
||||||
|
packetsClass=3,
|
||||||
|
)
|
|
@ -0,0 +1,28 @@
|
||||||
|
from Packets.Message import Message
|
||||||
|
import Packets.Message
|
||||||
|
|
||||||
|
|
||||||
|
class HopperRequest(Message):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
sender,
|
||||||
|
senderID,
|
||||||
|
sourceNode,
|
||||||
|
recipient,
|
||||||
|
recipientNode,
|
||||||
|
url,
|
||||||
|
params,
|
||||||
|
cryptographyInfo,
|
||||||
|
):
|
||||||
|
bytesOb = Packets.Message.dict2bytes({"url": url, "params": params})
|
||||||
|
super().__init__(
|
||||||
|
bytesOb,
|
||||||
|
sender,
|
||||||
|
senderID,
|
||||||
|
sourceNode,
|
||||||
|
recipient,
|
||||||
|
recipientNode,
|
||||||
|
cryptographyInfo,
|
||||||
|
5,
|
||||||
|
0,
|
||||||
|
)
|
|
@ -0,0 +1,21 @@
|
||||||
|
from Packets.Message import Message
|
||||||
|
import Packets.Message
|
||||||
|
|
||||||
|
|
||||||
|
class HopperResponse(Message):
|
||||||
|
def __init__(
|
||||||
|
self, sender, senderID, recipient, recipientNode, response, cryptographyInfo
|
||||||
|
):
|
||||||
|
bytesOb = Packets.Message.dict2bytes({"response": response})
|
||||||
|
|
||||||
|
bytesOb = cryptographyInfo.encrypt(bytesOb, recipientNode)
|
||||||
|
|
||||||
|
super().__init__(
|
||||||
|
bytesOb,
|
||||||
|
sender,
|
||||||
|
senderID,
|
||||||
|
recipient,
|
||||||
|
recipientNode,
|
||||||
|
cryptographyInfo,
|
||||||
|
packetsClass=5,
|
||||||
|
)
|
|
@ -0,0 +1,20 @@
|
||||||
|
# Template for a Protocol message
|
||||||
|
from Packets.Message import Message
|
||||||
|
import Packets.Message
|
||||||
|
|
||||||
|
|
||||||
|
class Template(Message):
|
||||||
|
def __init__(
|
||||||
|
self, sender, senderID, sourceNode, recipient, recipientNode, cryptographyInfo
|
||||||
|
):
|
||||||
|
super().__init__(
|
||||||
|
bytesOb,
|
||||||
|
sender,
|
||||||
|
senderID,
|
||||||
|
sourceNode,
|
||||||
|
recipient,
|
||||||
|
recipientNode,
|
||||||
|
cryptographyInfo,
|
||||||
|
0,
|
||||||
|
0,
|
||||||
|
)
|
|
@@ -0,0 +1,33 @@
+class Action:
+    def __init__(
+        self,
+        action,
+        data,
+        sender=None,
+        senderID=None,
+        sourceNode=None,
+        recipient=None,
+        recipientNode=None,
+    ):
+        self.action = action
+        self.data = data
+        if sender != None:
+            self.data["sender"] = sender
+
+        if senderID != None:
+            self.data["senderID"] = senderID
+
+        if sourceNode != None:
+            self.data["sourceNode"] = sourceNode
+
+        if recipient != None:
+            self.data["recipient"] = recipient
+
+        if recipientNode != None:
+            self.data["recipientNode"] = recipientNode
+
+    def getAction(self):
+        return self.action
+
+    def getData(self):
+        return self.data
@@ -0,0 +1,5 @@
+---
+title: Node
+---
+flowchart LR
+    id

@@ -0,0 +1,5 @@
+---
+title: Node
+---
+flowchart LR
+    id
(Two binary image files added, 1.5 KiB and 4.7 KiB; contents not shown.)
|
@ -1,7 +1,14 @@
|
||||||
|
from uuid import uuid4
|
||||||
|
import Components.hopper as hopper
|
||||||
|
from Packets.Messages.Protocols.catch.Request import CatchRequest
|
||||||
|
from Packets.Messages.Protocols.hopper.Request import HopperRequest
|
||||||
|
|
||||||
from microdot import Microdot
|
from microdot import Microdot
|
||||||
from microdot import send_file
|
from microdot import send_file
|
||||||
from microdot.websocket import with_websocket
|
from microdot.websocket import with_websocket
|
||||||
from microdot import Request
|
from microdot import Request
|
||||||
|
from microdot.jinja import Template
|
||||||
|
from microdot.session import Session, with_session
|
||||||
|
|
||||||
import random
|
import random
|
||||||
import json
|
import json
|
||||||
|
@ -44,7 +51,17 @@ class Server:
|
||||||
Reference to our Catch Cache instance to pull from for serving Catchs
|
Reference to our Catch Cache instance to pull from for serving Catchs
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, transceiver, catch, onodeID, network, cLog):
|
def __init__(
|
||||||
|
self,
|
||||||
|
transceiver,
|
||||||
|
catch,
|
||||||
|
onodeID,
|
||||||
|
network,
|
||||||
|
cLog,
|
||||||
|
cryptographyInfo,
|
||||||
|
remoteCatchIndex,
|
||||||
|
cache
|
||||||
|
):
|
||||||
self.cLog = cLog
|
self.cLog = cLog
|
||||||
self.transceiver = transceiver
|
self.transceiver = transceiver
|
||||||
self.network = network
|
self.network = network
|
||||||
|
@ -52,7 +69,12 @@ class Server:
|
||||||
self.nodeID = str(onodeID)
|
self.nodeID = str(onodeID)
|
||||||
self.peerIDs = {}
|
self.peerIDs = {}
|
||||||
self.app = Microdot()
|
self.app = Microdot()
|
||||||
|
# TODO: Secret key generation
|
||||||
|
self.session = Session(self.app, secret_key='6e012a8d-f857-4bd1-a245-bbe6a27e6440')
|
||||||
self.catch = catch
|
self.catch = catch
|
||||||
|
self.cache = cache
|
||||||
|
self.cryptographyInfo = cryptographyInfo
|
||||||
|
self.remoteCatchIndex = remoteCatchIndex
|
||||||
# self.nmap = {self.nodeID: self.t.interface.localNode.nodeNum}
|
# self.nmap = {self.nodeID: self.t.interface.localNode.nodeNum}
|
||||||
# self.cLog(20, "Initialized server")
|
# self.cLog(20, "Initialized server")
|
||||||
|
|
||||||
|
@ -139,8 +161,84 @@ class Server:
|
||||||
packetsClass=2,
|
packetsClass=2,
|
||||||
)
|
)
|
||||||
elif trigger == "catch":
|
elif trigger == "catch":
|
||||||
res = self.catch.get(message["head"], message["body"])
|
res = self.catch.get(
|
||||||
await ws.send('<div id="catchDisplay">{0}</div>'.format(res))
|
message["head"],
|
||||||
|
message["body"],
|
||||||
|
fins=message["finsStr"].split(","),
|
||||||
|
)
|
||||||
|
if res == False:
|
||||||
|
await ws.send(
|
||||||
|
'<div id="catchDisplay">{0}</div>'.format(
|
||||||
|
"Searching PierMesh for Catch please wait...<img src='/static/img/searching.gif'>"
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
peerID = message["catchPeerID"]
|
||||||
|
q = {
|
||||||
|
"head": message["head"],
|
||||||
|
"body": message["body"],
|
||||||
|
"fins": message["fins"].split(","),
|
||||||
|
}
|
||||||
|
if q["fins"] == "":
|
||||||
|
del q["fins"]
|
||||||
|
q = self.remoteCatchIndex.search(q)
|
||||||
|
if q != False:
|
||||||
|
CatchRequest(
|
||||||
|
peerID,
|
||||||
|
000000,
|
||||||
|
self.nodeID,
|
||||||
|
q["remoteNode"],
|
||||||
|
q["remoteNode"]",
|
||||||
|
self.cryptographyInfo,
|
||||||
|
message["head"],
|
||||||
|
message["body"],
|
||||||
|
message["fins"],
|
||||||
|
)
|
||||||
|
# TODO: Daisy replication settings
|
||||||
|
elif trigger == "hopper":
|
||||||
|
url = message["url"]
|
||||||
|
isPost = bool(message["isPost"])
|
||||||
|
remote = bool(message["remote"])
|
||||||
|
remoteNode = message["remoteNode"]
|
||||||
|
params = json.loads(message["params"])
|
||||||
|
# TODO: Redirecting to html content
|
||||||
|
if remote:
|
||||||
|
peerID = message["peerID"]
|
||||||
|
await ws.send(
|
||||||
|
'<div id="lilypad">{0}</div>'.format(
|
||||||
|
"Requesting hop from remote node...<img src='/static/img/searching.gif'>"
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
r = HopperRequest(
|
||||||
|
peerID,
|
||||||
|
000000,
|
||||||
|
self.nodeID,
|
||||||
|
remoteNode,
|
||||||
|
remoteNode,
|
||||||
|
url,
|
||||||
|
params,
|
||||||
|
self.cryptographyInfo,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
if isPost:
|
||||||
|
await ws.send(
|
||||||
|
'<div id="lilypad">{0}</div>'.format(
|
||||||
|
hopper.post(url, params)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
res = hopper.get(url, params)
|
||||||
|
if res["content-type"] == "text/html":
|
||||||
|
resID = uuid4()
|
||||||
|
self.cache.create("tmp/hopper/" + resID, {"html": res})
|
||||||
|
await ws.send("<div id='lilypad'><a href='/hop/{0}'></a></div>".format(resID))
|
||||||
|
else:
|
||||||
|
await ws.send(
|
||||||
|
'<div id="lilypad">{0}</div>'.format(res)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
await ws.send('<div id="catchDisplay">{0}</div>'.format(res))
|
||||||
# TODO: Catch update packets
|
# TODO: Catch update packets
|
||||||
elif trigger == "catchEdit":
|
elif trigger == "catchEdit":
|
||||||
self.catch.addc(
|
self.catch.addc(
|
||||||
|
@ -168,6 +266,23 @@ class Server:
|
||||||
"""
|
"""
|
||||||
return send_file("webui/build/index/index.html")
|
return send_file("webui/build/index/index.html")
|
||||||
|
|
||||||
|
@self.app.route("/hop/<tmpResourceID>")
|
||||||
|
async def hop(request, tmpResourceID):
|
||||||
|
return self.cache.get("tmp/hopper/" + tmpResourceID).get()["html"]
|
||||||
|
|
||||||
|
@self.app.route("/api/json")
|
||||||
|
async def api(request):
|
||||||
|
return {"hello": "world"}
|
||||||
|
|
||||||
|
@self.app.route("/admin")
|
||||||
|
@with_session
|
||||||
|
async def admin(request):
|
||||||
|
return Template('admin/admin.html').render(psks=self.getPSKs())
|
||||||
|
|
||||||
|
async def getPSKs(self):
|
||||||
|
psks = [{"psk": v["PSK"], "nodeID": k} for k,v in self.cryptographyInfo["msg"].items()]
|
||||||
|
return psks
|
||||||
|
|
||||||
async def sendToPeer(self, peerID: str, data: str):
|
async def sendToPeer(self, peerID: str, data: str):
|
||||||
"""
|
"""
|
||||||
Send data to Websocket of peer with peerID
|
Send data to Websocket of peer with peerID
|
||||||
|
|
|
@@ -0,0 +1,16 @@
+{% extends "shared/base.html" %}
+{% block body %}
+<form id="admin" hx-ext="ws" ws-connect="/bubble">
+  <ul id="psks">
+    {% for psk in psks %}
+    <li>
+      Node ID: {{ psk['nodeID'] }}
+      <br>
+      PSK:
+      <input type="number" id="{{ psk['nodeID'] }}" name="{{ psk['nodeID'] }}" value="{{ psk['psk'] }}" max="999999">
+    </li>
+    {% endfor %}
+  </ul>
+  <button id="pskUpdate" name="pskUpdate">Update PSK</button>
+</form>
+{% endblock %}
@ -4,6 +4,8 @@
|
||||||
<img alt="PierMesh logo" height="128px" src="/res/img/logo.png">
|
<img alt="PierMesh logo" height="128px" src="/res/img/logo.png">
|
||||||
<br>
|
<br>
|
||||||
<br>
|
<br>
|
||||||
|
{% include "shared/hopper.html" %}
|
||||||
|
<br>
|
||||||
{% include "shared/catch.nav.html" %}
|
{% include "shared/catch.nav.html" %}
|
||||||
<br>
|
<br>
|
||||||
{% include "shared/catch.editor.html" %}
|
{% include "shared/catch.editor.html" %}
|
||||||
|
|
|
@ -15,14 +15,41 @@
|
||||||
<input type="text" id="body" name="body" size="16" maxlength="16">
|
<input type="text" id="body" name="body" size="16" maxlength="16">
|
||||||
<ul id="fins">
|
<ul id="fins">
|
||||||
Fins:
|
Fins:
|
||||||
<li class="fin">
|
<li id="pfin" class="fin">
|
||||||
<input type="text" size="8" maxlength="8">
|
<input type="text" id="fin00" name="fin00" size="8" maxlength="8">
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
<button>+</button>
|
|
||||||
</li>
|
</li>
|
||||||
</ul>
|
</ul>
|
||||||
<button>Get</button>
|
<script>
|
||||||
|
function addFin() {
|
||||||
|
var pfin = document.querySelector('#fins').lastElementChild;
|
||||||
|
var pclone = pfin.cloneNode();
|
||||||
|
pclone.id = 'cfin';
|
||||||
|
var cid = pclone.firstElementChild.id;
|
||||||
|
cid = cid.substring(3);
|
||||||
|
cid = ('' + parseInt(cid) + 1).padStart(2, '0');
|
||||||
|
cid = fin + cid;
|
||||||
|
pclone.firstElementChild.id = cid;
|
||||||
|
pclone.firstElementChild.id = cid;
|
||||||
|
pfin.insertAdjacentElement('afterend', pclone);
|
||||||
|
}
|
||||||
|
function getFins() {
|
||||||
|
var fins = document.querySelector("#fins").children;
|
||||||
|
var finsStr = "";
|
||||||
|
for (var i = 0; i < fins.length; i++) {
|
||||||
|
var fin = fins[i];
|
||||||
|
finsStr = finsStr + fin.firstElementChild.value;
|
||||||
|
if (i != (fins.length - 1)) {
|
||||||
|
finsStr = finsStr + 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
document.getElementById("finaStr").value = finsStr;
|
||||||
|
}
|
||||||
|
</script>
|
||||||
|
<button onclick="addFin();">+ Fin</button>
|
||||||
|
<input type="hidden" id="catchPeerID" name="catchPeerID">
|
||||||
|
<input type="hidden" id="finsStr" name="finsStr">
|
||||||
|
<button
|
||||||
|
onclick"document.getElementById('catchPeerID').value=document.getElementById('peerID');getFins();">Get</button>
|
||||||
</form>
|
</form>
|
||||||
Results:
|
Results:
|
||||||
<br>
|
<br>
|
||||||
|
|
|
@ -0,0 +1,31 @@
|
||||||
|
<div class="plank" hx-ext="ws" ws-connect="/bubble">
|
||||||
|
<img src="/res/img/hopperdisplay.png">
|
||||||
|
<br>
|
||||||
|
Hopper<br><br>
|
||||||
|
<form id="hopper" ws-send>
|
||||||
|
<label for="url">URL</label>
|
||||||
|
<br>
|
||||||
|
<input type="text" id="url" name="url" size="255" maxlength="255">
|
||||||
|
<br>
|
||||||
|
<label for="params">Parameters (json)</label>
|
||||||
|
<br>
|
||||||
|
<input type="textarea" id="parameters" name="parameters" size="255">
|
||||||
|
<br>
|
||||||
|
<label for="isPost">Unchecked: GET, Checked: POST</label>
|
||||||
|
<br>
|
||||||
|
<input type="checkbox" id="isPost" name="isPost">
|
||||||
|
<br>
|
||||||
|
<label for="remote">Unchecked: GET, Checked: POST</label>
|
||||||
|
<br>
|
||||||
|
<input type="checkbox" id="remote" name="remote">
|
||||||
|
<br>
|
||||||
|
<label for="remoteNode">Remote node ID</label>
|
||||||
|
<br>
|
||||||
|
<input type="number" id="remoteNode" name="remoteNode" max="999999">
|
||||||
|
<br>
|
||||||
|
<input id="hopperPeerID" name="hopperPeerID" type="hidden">
|
||||||
|
<button onclick="document.getElementById('hopperPeerID').value = document.getElementById('peerID')">Get</button>
|
||||||
|
<br>
|
||||||
|
<div style="background-color: var(--palette-three);" id="lilypad"></div>
|
||||||
|
</form>
|
||||||
|
</div>
|
|
@ -1,21 +1,36 @@
|
||||||
async def filter(completeMessage, recipient, recipientNode, todo):
|
from Services.Action import Action
|
||||||
|
from src.Packets.Message import p
|
||||||
|
|
||||||
|
|
||||||
|
async def filter(completeMessage, recipient, recipientNode, todo, toLocal=True):
|
||||||
"""
|
"""
|
||||||
Catch exchange protocol
|
Catch exchange protocol
|
||||||
|
|
||||||
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/catch.py>`__
|
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/catch.py>`__
|
||||||
"""
|
"""
|
||||||
|
# TODO: Checking for catch om other nodes and maintaining defined duplication via Daisy
|
||||||
|
# TODO: Daisy protocol
|
||||||
|
# TODO: pAction for syncing listing
|
||||||
|
# TODO: pAction for querying
|
||||||
m = completeMessage
|
m = completeMessage
|
||||||
# TODO: Sending to other nodes clients
|
# TODO: Sending to other nodes clients
|
||||||
todo.append(
|
pAction = int(m["pAction"])
|
||||||
{
|
if pAction == 0:
|
||||||
"action": "sendCatch",
|
todo.append(
|
||||||
"data": {
|
Action(
|
||||||
"toLocal": True,
|
"sendCatch",
|
||||||
"recipientNode": recipientNode,
|
{"head": m["head"], "body": m["body"], "fins": m["fins"]},
|
||||||
"recipient": recipient,
|
recipientNode=m["sourceNode"],
|
||||||
"head": m["head"],
|
recipent=m["sender"],
|
||||||
"body": m["body"],
|
)
|
||||||
"fins": m["fins"],
|
)
|
||||||
},
|
elif pAction == 1:
|
||||||
}
|
todo.append(Action("routeCatch", {"html": m["html"]}, recipient=recipient))
|
||||||
)
|
elif pAction == 2:
|
||||||
|
todo.append(Action("syncIndex", m["index"]))
|
||||||
|
else:
|
||||||
|
todo.append(
|
||||||
|
Action(
|
||||||
|
"cLog", {"message": "Unknown pAction " + m["pAction"], "priority": 20}
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
|
@ -9,7 +9,7 @@ async def filter(completeMessage, recipientNode, todo):
|
||||||
"action": "keyDeriveDH",
|
"action": "keyDeriveDH",
|
||||||
"data": {
|
"data": {
|
||||||
"publicKey": completeMessage["data"]["publicKey"],
|
"publicKey": completeMessage["data"]["publicKey"],
|
||||||
"params": completeMessage["data"]["params"],
|
"peerEphemeralKey": completeMessage["data"]["ephemeralKey"],
|
||||||
"recipientNode": recipientNode,
|
"recipientNode": recipientNode,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
|
@@ -0,0 +1,22 @@
+from Services.Action import Action
+
+
+async def filter(completeMessage, todo, recipient, recipientNode):
+    """
+    Internet inter(h)op protocol
+
+    `🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/hopper.py>`__
+    """
+    m = completeMessage
+    pAction = int(m["pAction"])
+    if pAction == 0:
+        todo.append(
+            Action(
+                "hop",
+                {"url": m["data"]["url"], "method": m["method"]},
+                recipient=m["sender"],
+                recipientNode=m["sourceNode"],
+            )
+        )
+    elif pAction == 1:
+        Action("routeHop", {"res": m["res"]}, recipient=recipient)
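For reference, a hedged sketch of the Action the filter above would append for a pAction of 0, built by hand with illustrative field values (not part of the commit):

```python
from Services.Action import Action

# Route a hop request back to the peer and node that asked for it
action = Action(
    "hop",
    {"url": "https://example.com", "method": "get"},
    recipient=123456,        # illustrative peer ID
    recipientNode="000001",  # illustrative node ID
)
assert action.getAction() == "hop"
# getData() returns the dict with recipient/recipientNode merged in by __init__
print(action.getData())
```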
@ -96,6 +96,10 @@ class Filter:
|
||||||
await Sponge.Protocols.cryptography.filter(
|
await Sponge.Protocols.cryptography.filter(
|
||||||
completeMessage, recipientNode, self.todo
|
completeMessage, recipientNode, self.todo
|
||||||
)
|
)
|
||||||
|
elif protocol == "hopper":
|
||||||
|
await Sponge.Protocols.hopper.filter(
|
||||||
|
completeMessage, self.todo, recipient, recipientNode
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
self.cLog(30, "Cant route, no protocol")
|
self.cLog(30, "Cant route, no protocol")
|
||||||
|
|
||||||
|
|
|
@ -185,6 +185,7 @@ class Transceiver:
|
||||||
break
|
break
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
# TODO: Deprecate
|
||||||
async def addPackets(
|
async def addPackets(
|
||||||
self,
|
self,
|
||||||
data,
|
data,
|
||||||
|
@@ -248,6 +249,23 @@ class Transceiver:
             }
             self.tasks[str(self.cpid)] = currentTask

+    async def sendMessage(self, message, recipientNode=None):
+        for p in message.packets:
+            if recipientNode == None:
+                self.send(p)
+            else:
+                self.cLog(10, "Sending target: " + str(recipientNode))
+                self.send(p, recipientNode=recipientNode)
+            awaitTask = asyncio.create_task(self.awaitResponse(self.cpid))
+            await asyncio.sleep(1)
+            currentTask = {
+                "ob": awaitTask,
+                "pid": str(self.cpid),
+                "packet": p,
+                "retry": False,
+            }
+            self.tasks[str(self.cpid)] = currentTask
+
     async def progressCheck(self):
         """
         Checks if acknowldgement was received per packet and if not resends
@@ -0,0 +1,2 @@
+pylint --errors-only --disable=C,R run.py > tmp.debug
+nano tmp.debug

src/run.py (175 lines changed)
@@ -3,10 +3,16 @@ from Sponge.base import Filter
 from Siph.map import Network
 from Daisy.Catch import Catch
 from Daisy.Cache import Cache
+from Daisy.Index import Index
+from Daisy.CryptographyUtil import SteelPetal
 from Splash.serve import Server
 from Transceiver.Transceiver import Transceiver
-from Cryptography.WhaleSong import DHEFern
+from Cryptography.WhaleSong import Transport
 from ui import TUI
+import Components.hopper as hopper
+from Packets.Messages.Protocols.hopper.Response import HopperResponse
+from Packets.Messages.Protocols.catch.Response import CatchResponse
+

 # Generic imports
 import logging
@@ -18,19 +24,80 @@ import datetime
 import traceback
 import threading
 import random
+import lzma
+import argparse
+import configparser

 # Process management library
 import psutil

+import msgpack
+

 if __name__ == "__main__":
-    global nodeOb, tuiOb
+    global nodeOb, tuiOb, argConfig, config
     """
     Global objects for the PierMesh service and the TUI so we can terminate the associated processes later
     """
     nodeOb = None
     tuiOb = None
     # Pull startup parameters
-    device, webPort, serverInfoFile, delay, nodeNickname = sys.argv[1:]
+    parser = argparse.ArgumentParser()
+    parser.add_argument("-d", "--device", help="Set transceiver device path")
+    parser.add_argument("-p", "--port", help="Web UI server port")
+    parser.add_argument("-n", "--nickname", help="Node nickname")
+    parser.add_argument(
+        "-s", "--startupDelay", help="Startup delay (useful for testing)"
+    )
+    parser.add_argument(
+        "-o", "--override", help="Whether to override config", default=False
+    )
+    parser.add_argument("-x", "--showTUI", help="Whether to show TUI", default=True)
+    argConfig = parser.parse_args()
+    config = configparser.ConfigParser()
+    if argConfig.confList != False:
+        pass
+    config.read(".piermesh")
+
+    device = ""
+    if "transceiverPort" in config["OPERATOR_REQUIRED"]:
+        if argConfig.override:
+            device = argConfig.device
+        else:
+            device = config["OPERATOR_REQUIRED"]["transceiverPort"]
+    else:
+        if argConfig.device == False:
+            print("No device set exiting...")
+            exit(0)
+        else:
+            device = argConfig.device
+    webPort = config["DEFAULT"]["WebUIPort"]
+    if argConfig.override:
+        webPort = argConfig.port
+    else:
+        if "WebUIPort" in config["OPERATOR_OVERRIDES"]:
+            webPort = config["OPERATOR_OVERRIDES"]["WebUIPort"]
+    webPort = int(webPort)
+    delay = config["DEFAULT"]["StartupDelay"]
+    if argConfig.override:
+        delay = argConfig.delay
+    else:
+        if "StartupDelay" in config["OPERATOR_OVERRIDES"]:
+            delay = config["OPERATOR_OVERRIDES"]["StartupDelay"]
+    delay = int(delay)
+    nodeNickname = config["DEFAULT"]["Nickname"]
+    if argConfig.override:
+        nodeNickname = argConfig.nickname
+    else:
+        if "Nickname" in config["OPERATOR_OVERRIDES"]:
+            nodeNickname = config["OPERATOR_OVERRIDES"]["Nickname"]
+    showTUI = config["DEFAULT"]["ShowTUI"]
+    if argConfig.override:
+        showTUI = argConfig.showTUI
+    else:
+        if "ShowTUI" in config["OPERATOR_OVERRIDES"]:
+            showTUI = config["OPERATOR_OVERRIDES"]["ShowTUI"]
+    showTUI = bool(showTUI)

     # Set up file based logging
     logPath = "logs"
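Not part of the commit: the startup block above resolves every setting in the same order, so the precedence can be condensed into one hypothetical helper (the function name and structure are mine; the precedence itself is read off the hunk): a CLI flag wins only when `--override` is passed, otherwise an `[OPERATOR_OVERRIDES]` entry, otherwise the `[DEFAULT]` value. The sketch assumes a `.piermesh` file with those sections is present in the working directory.

```python
# Hedged sketch of the override precedence the hunk applies per setting.
import configparser


def resolveSetting(config, key, cliValue, override):
    # CLI flag wins only when --override is set and a value was given
    if override and cliValue is not None:
        return cliValue
    # otherwise an operator override entry beats the default section
    if key in config["OPERATOR_OVERRIDES"]:
        return config["OPERATOR_OVERRIDES"][key]
    return config["DEFAULT"][key]


config = configparser.ConfigParser()
config.read(".piermesh")
webPort = int(resolveSetting(config, "WebUIPort", None, False))
print(webPort)
```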
@@ -103,6 +170,8 @@ class Node:
         self.network = Network()
         self.catch = Catch(walk=True)
         self.cache = Cache(walk=True)
+        self.remoteCatchIndex = Index(nodeNickname)
+        self.daisyCryptography = None

         self.nodeInfo = self.cache.get(serverInfoFile)
         if self.nodeInfo == False:
@@ -117,7 +186,7 @@ class Node:
         self.cLog(10, "Command line arguments: " + ", ".join(sys.argv))
         self.oTransceiver = None
         self.cLog(20, "Cryptography initializing")
-        self.cryptographyInfo = DHEFern(self.cache, nodeNickname, self.cLog)
+        self.cryptographyInfo = Transport(self.cache, nodeNickname, self.cLog)
         self.cLog(20, "Cryptography initialized")
         self.processed = []
         self.proc = psutil.Process(os.getpid())
@@ -143,6 +212,13 @@ class Node:
         logging.log(priority, message)
         self.toLog.append("[{0}]:\n{1}".format(datetime.datetime.now(), message))

+    async def fsInit(self):
+        # TODO: Flesh out and properly link everything
+        if not os.path.exists("data"):
+            os.makedirs("data")
+        if not os.path.exists("data/" + nodeNickname):
+            os.makedirs("data/" + nodeNickname)
+
     async def monitor(self):
         global tuiOb
         """
@@ -180,8 +256,12 @@ class Node:
         We use a common technique here that calls the function from our preloaded actions via dictionary entry
         """
         while True:
-            while len(self.todo) >= 1:
-                todoNow = self.todo.pop()
+            while (len(self.todo) >= 1) & (len(tuiOb.todo) >= 1):
+                todoNow = None
+                if len(self.todo) > 0:
+                    todoNow = self.todo.pop()
+                else:
+                    todoNow = tuiOb.todo.pop()
                 action = todoNow["action"]
                 self.cLog(20, "Action: " + action)
                 data = todoNow["data"]
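The context line above mentions calling handlers from the preloaded actions "via dictionary entry"; the lookup itself sits outside this hunk. A hedged, self-contained sketch of that dispatch pattern (the class and values are illustrative only, not the repo's code):

```python
# Sketch of name -> coroutine dispatch for popped todo entries.
import asyncio


class NodeSketch:
    def __init__(self):
        # preloaded actions table, keyed by the "action" field of a todo entry
        self.actions = {"cLog": self.action_cLog}

    async def action_cLog(self, data: dict):
        print(data["priority"], data["message"])

    async def dispatch(self, todoNow: dict):
        # look the handler up by name, then run it with the entry's data
        await self.actions[todoNow["action"]](todoNow["data"])


asyncio.run(
    NodeSketch().dispatch({"action": "cLog", "data": {"priority": 20, "message": "hello"}})
)
```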
@@ -212,7 +292,26 @@ class Node:
         Get catch and return the data to a peer
         """
         res = self.catch.get(data["head"], data["body"], fins=data["fins"])
-        self.server.sendToPeer(data["recipient"], res)
+        r = CatchResponse(
+            self.nodeInfo.onodeID,
+            000000,
+            self.nodeInfo.onodeID,
+            data["recipient"],
+            data["recipientNode"],
+            self.cryptographyInfo,
+            res,
+        )
+        self.oTransceiver.sendMessage(r)
+
+    async def action_cLog(self, data: dict):
+        self.cLog(data["priority"], data["message"])
+
+    async def action_routeCatch(self, data: dict):
+        self.server.sendToPeer(data["recipient"], data["html"])
+
+    async def action_syncIndex(self, data: dict):
+        for entry in data["index"]:
+            self.remoteCatchIndex.addEntry(entry)

     async def action_map(self, data: dict):
         """
@@ -226,7 +325,7 @@ class Node:
         self.cLog(20, "Lookup addition done")
         self.network.addon(data["onodeID"])

-    async def action_initNodeDH(self, data: dict):
+    async def action_initCryptography(self, data: dict):
         """
         Initialize diffie hellman key exchange

@@ -234,25 +333,43 @@ class Node:
         --------
         Cryptography.DHEFern.DHEFern: End to end encryption functionality
         """
-        if self.cryptographyInfo.getRecord("key", data["onodeID"]) == False:
-            await self.oTransceiver.initNodeDH(
-                self.cryptographyInfo, int(data["mnodeID"]), data["onodeID"]
-            )
+        self.cryptographyInfo.sessionSetup(
+            data["recipientNode"], data["publicKey"], data["peerEphemeralKey"]
+        )

-    async def action_keyDeriveDH(self, data: dict):
-        """
-        Derive key via diffie hellman key exchange
-        """
+    async def action_hop(self, data):
         try:
-            self.cryptographyInfo.keyDerive(
-                data["publicKey"],
-                self.cryptographyInfo.getSalt(),
-                data["recipientNode"],
-                data["params"],
-            )
+            r = None
+            if data["method"] == "get":
+                r = hopper.get(
+                    data["url"],
+                    params=data["parameters"],
+                    followTags=["img", "script", "link"],
+                )
+            elif data["method"] == "post":
+                r = hopper.post(data["url"], params=data["parameters"])
+            if r != None:
+                r = HopperResponse(
+                    self.onodeID,
+                    000000,
+                    data["recipient"],
+                    data["recipientNode"],
+                    r,
+                    self.cryptographyInfo,
+                )
+                self.oTransceiver.sendMessage(r)
+
         except:
             self.cLog(30, traceback.format_exc())

+    async def routeHop(self, data: dict):
+        self.server.sendToPeer(data["recipient"], data["res"])
+
+    async def action_addPSK(self, data):
+        # TODO: Switch to credential
+        self.cryptographyInfo.createEmpty(data["nodeID"])
+        self.cryptographyInfo.update(data["nodeID"], {"PSK": data["PSK"]})
+
+
 async def logPassLoop():
     """
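For reference, and not part of the diff: these are the fields `action_hop` reads from its `data` argument in the hunk above; the concrete values below are made up.

```python
# Hypothetical todo entry shape consumed by action_hop (values invented).
hopTodo = {
    "action": "hop",
    "data": {
        "method": "get",               # or "post"
        "url": "https://example.com",
        "parameters": None,            # forwarded to hopper.get/post as params
        "recipient": 123456,           # hypothetical peer ID for the response
        "recipientNode": 654321,       # hypothetical destination node
    },
}
print(hopTodo)
```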
@@ -285,7 +402,9 @@ async def main():
     """
     global nodeOb
     try:
+        passkey = input("Enter node decryption key: ")
         nodeOb = Node()
+        nodeOb.daisyCryptography = SteelPetal(passkey, nodeOb.cLog)
         nodeOb.cLog(20, "Starting up")
         nodeOb.cLog(20, "Staggering {0} seconds, please wait".format(sys.argv[4]))
         time.sleep(int(sys.argv[4]))
@@ -304,6 +423,7 @@ async def main():
             nodeOb.onodeID,
             nodeOb.network,
             nodeOb.cLog,
+            nodeOb.cache,
         )
         nodeOb.mTasks["list"] = asyncio.create_task(nodeOb.spongeListen())
         await asyncio.sleep(1)
@@ -322,11 +442,12 @@ if __name__ == "__main__":
     try:
         mainThread = threading.Thread(target=asyncio.run, args=(main(),))
         mainThread.start()
-        lplThread = threading.Thread(target=asyncio.run, args=(logPassLoop(),))
-        lplThread.start()
-        tuiOb = TUI()
-        tuiOb.nodeOb = nodeOb
-        tuiOb.run()
+        if showTUI:
+            lplThread = threading.Thread(target=asyncio.run, args=(logPassLoop(),))
+            lplThread.start()
+            tuiOb = TUI()
+            tuiOb.nodeOb = nodeOb
+            tuiOb.run()
     except:
         try:
             nodeOb.cLog(30, traceback.format_exc())
@@ -0,0 +1,296 @@
+import base64
+import os
+
+from cryptography.fernet import Fernet
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives.asymmetric import dh
+from cryptography.hazmat.primitives.kdf.hkdf import HKDF
+from cryptography.hazmat.primitives.serialization import (
+    Encoding,
+    NoEncryption,
+    ParameterFormat,
+    PublicFormat,
+    PrivateFormat,
+)
+import cryptography.hazmat.primitives.serialization as Serialization
+
+import msgpack
+
+from Daisy.Store import Store
+
+# TODO: Different store directories per node
+
+
+class DHEFern:
+    """
+
+    `🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Cryptography/WhaleSong.py>`__
+
+    Attributes
+    ----------
+    cLog
+        Method reference to `run.Node.cLog` so we can log to the ui from here
+
+    loadedParams: dict
+        In memory representations of cryptography parameters
+
+    loadedKeys: dict
+        In memory representations of cryptography keys
+
+    nodeNickname: str
+        Name of node for isolating configs when running multiple nodes
+
+    cache: Components.daisy.Cache
+        Daisy cache for use in storing cryptography information
+
+    publicKey
+        Public key for node
+
+    privateKey
+        Private key for node
+    """
+
+    def __init__(self, cache, nodeNickname, cLog):
+        """
+        Parameters
+        ----------
+        cache: Components.daisy.Cache
+            Reference to the node instances Daisy cache
+
+        nodeNickname: str
+            Node nickname for record storage
+
+        cLog
+            Reference to `run.Node.cLog`
+
+        """
+        self.cLog = cLog
+        self.stores = {}
+        self.loadedParams = {}
+        self.loadedKeys = {}
+        self.nodeNickname = nodeNickname
+        self.cache = cache
+        if os.path.exists("daisy/cryptography/{0}/param".format(nodeNickname)) == False:
+            self.initStore("param")
+        else:
+            self.stores["param"] = Store("param", "cryptography", nodeNickname)
+            self.params = self.loadParamBytes(self.stores["param"].get()["self"])
+        self.cLog(20, "Param store initialized")
+        if os.path.exists("daisy/cryptography/{0}/key".format(nodeNickname)) == False:
+            self.cLog(20, "Key store DNE, initializing")
+            self.initStore("key")
+            self.genKeyPair()
+        else:
+            self.cLog(20, "Key store exists, loading")
+            self.stores["key"] = Store("key", "cryptography", nodeNickname)
+            self.cLog(20, "Store loaded")
+            # tks = self.stores["key"].get()
+            # self.publicKey = tks["self"]["publicKey"]
+            # self.privateKey = tks["self"]["privateKey"]
+        self.cLog(20, "Key store initialized")
+
+    def checkInMem(self, store: str, nodeID: str):
+        """
+        Check if parameters or keys are loaded for node of nodeID
+
+        Parameters
+        ----------
+        store: str
+            Whether to check loaded keys or parameters
+
+        """
+        if store == "param":
+            return nodeID in self.loadedParams.keys()
+        elif store == "key":
+            return nodeID in self.loadedKeys.keys()
+
+    def loadRecordToMem(self, store: str, nodeID: str):
+        """
+        Load record of nodeID from store to either keys or pameters
+        """
+        r = self.getRecord(store, nodeID)
+        if r == False:
+            self.cLog(
+                30, "Tried to load nonexistent {0} for node {1}".format(store, nodeID)
+            )
+            return False
+        elif self.checkInMem(store, nodeID):
+            self.cLog(10, "{0}s already deserialized, skipping".format(store))
+        else:
+            if store == "param":
+                self.loadedParams[nodeID] = self.loadParamBytes(r)
+            elif store == "key":
+                self.loadedKeys[nodeID] = {
+                    "publicKey": Serialization.load_pem_public_key(r["publicKey"]),
+                    "privateKey": Serialization.load_pem_private_key(
+                        r["privateKey"], None
+                    ),
+                }
+        return True
+
+    def getRecord(self, store: str, key: str):
+        """
+        Get record from store: store with key: key
+        """
+        r = stores[store].getRecord(key)
+        if r == False:
+            self.cLog(20, "Record does not exist")
+            return False
+        else:
+            return r
+
+    def initStore(self, store: str):
+        """
+        Initialize store: store
+        """
+        self.stores[store] = Store(store, "cryptography", self.nodeNickname)
+        if store == "param":
+            self.genParams()
+            self.stores[store].update("self", self.getParamsBytes(), recur=False)
+        elif store == "key":
+            self.stores[store].update("self", {}, recur=False)
+        else:
+            self.cLog(30, "Store not defined")
+
+    def genParams(self):
+        """
+        Generate Diffie Hellman parameters
+        """
+        params = dh.generate_parameters(generator=2, key_size=2048)
+        self.params = params
+        return params
+
+    def getParamsBytes(self):
+        """
+        Get bytes encoded from self.parameters (TODO: Encode from store)
+        """
+        return self.params.parameter_bytes(Encoding.PEM, ParameterFormat.PKCS3)
+
+    def loadParamBytes(self, pemBytes: bytes):
+        """
+        Load parameters to self.params from given bytes (TODO: Load from store)
+        """
+        self.params = Serialization.load_pem_parameters(pemBytes)
+        return self.params
+
+    def genKeyPair(self, paramsOverride=False, setSelf: bool = True):
+        """
+        Generate public and private keys from self.params (TODO: Gen from passed params)
+
+        paramsOverride
+            False or parameters to use (TODO)
+
+        setSelf: bool
+            Whether to set self.privateKey and self.publicKey
+        """
+        privateKey = self.params.generate_private_key()
+        if setSelf:
+            self.privateKey = privateKey
+        publicKey = privateKey.public_key()
+        if setSelf:
+            self.publicKey = publicKey
+            self.stores["key"].update(
+                "self",
+                {
+                    "publicKey": self.publicKey.public_bytes(
+                        Encoding.PEM, PublicFormat.SubjectPublicKeyInfo
+                    ),
+                    "privateKey": self.privateKey.private_bytes(
+                        Encoding.PEM, PrivateFormat.PKCS8, NoEncryption()
+                    ),
+                },
+            )
+            return [privateKey, publicKey]
+        else:
+            publicKey = publicKey.public_bytes(
+                Encoding.PEM, PublicFormat.SubjectPublicKeyInfo
+            )
+            privateKey = privateKey.private_bytes(
+                Encoding.PEM, PrivateFormat.PKCS8, NoEncryption()
+            )
+            return [privateKey, publicKey]
+
+    def keyDerive(self, pubKey: bytes, salt: bytes, nodeID: str, params: bytes):
+        """
+        Derive shared key using Diffie Hellman
+
+        pubKey: bytes
+            Public key
+
+        nodeID: str
+            PierMesh node ID
+
+        params: bytes
+            Encryption parameters
+        """
+        if self.checkInMem("param", nodeID) == False:
+            if self.getRecord("param", nodeID) == False:
+                self.updateStore("param", nodeID, params, recur=False)
+            self.loadRecordToMem("param", nodeID)
+        self.cLog(20, "Precheck done for key derivation")
+
+        # TODO: Load them and if private key exists load it, otherwise generate a private key
+        if self.checkInMem("key", nodeID) == False:
+            if self.getRecord("key", nodeID) == False:
+                privateKey, publicKey = self.genKeyPair(setSelf=False)
+                self.updateStore(
+                    "key", nodeID, {"publicKey": publicKey, "privateKey": privateKey}
+                )
+            self.loadRecordToMem("key", nodeID)
+
+        sharedKey = self.loadedKeys[nodeID]["privateKey"].exchange(
+            Serialization.load_pem_public_key(pubKey)
+        )
+        # Perform key derivation.
+        self.cLog(20, "Performing key derivation")
+        derivedKey = HKDF(
+            algorithm=hashes.SHA256(), length=32, salt=salt, info=b"handshake data"
+        ).derive(sharedKey)
+        self.cLog(20, "Derived key")
+        ederivedKey = base64.urlsafe_b64encode(derivedKey)
+        tr = self.getRecord("key", nodeID)
+        tr["derivedKey"] = ederivedKey
+        self.updateStore("key", nodeID, tr)
+        self.cLog(20, "Done with cryptography store updates")
+        return ederivedKey
+
+    def getSalt(self):
+        """
+        Get random salt
+        """
+        return os.urandom(16)
+
+    # TODO: Build in transport security (node/node)
+    def encrypt(self, data, nodeID: str, isDict: bool = True):
+        """
+        Do Fernet encryption
+
+        data
+            Either bytes or dict to encrypt
+
+        isDict: bool
+            Whether data is a dictionary
+        """
+        r = self.getRecord("key", nodeID)
+        if r == False:
+            self.cLog(20, "Node {0} not in keystore".format(nodeID))
+            return False
+        else:
+            derivedKey = r["derivedKey"]
+            fernet = Fernet(derivedKey)
+            if isDict:
+                data = msgpack.dumps(data)
+            token = fernet.encrypt(data)
+            return token
+
+    def decrypt(self, data, nodeID: str):
+        """
+        Decrypt bytes and return either str or dict (TODO: Check whether to msgpack load)
+        """
+        r = self.getRecord("key", nodeID)
+        if r == False:
+            self.cLog(20, "No record of node " + nodeID)
+            return False
+        elif not "derivedKey" in r.keys():
+            self.cLog(20, "No key derived for node " + nodeID)
+            return False
+        else:
+            fernet = Fernet(self.getRecord("key", nodeID)["derivedKey"])
+            return msgpack.loads(fernet.decrypt(data))
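Not part of the commit: the module above wires a Diffie-Hellman exchange into a Fernet key via HKDF and persists everything through Daisy stores. Below is a self-contained sketch of just the cryptographic flow, using the same `cryptography` primitives the file imports; the two in-process parties and the fixed salt are assumptions for the demo (2048-bit parameter generation is slow).

```python
# Sketch of the derive-then-encrypt flow: DH exchange -> HKDF -> urlsafe base64 -> Fernet.
import base64

import msgpack
from cryptography.fernet import Fernet
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import dh
from cryptography.hazmat.primitives.kdf.hkdf import HKDF

params = dh.generate_parameters(generator=2, key_size=2048)  # as in genParams
alicePrivate = params.generate_private_key()
bobPrivate = params.generate_private_key()

salt = b"0" * 16  # keyDerive takes a salt; getSalt() would use os.urandom(16)
sharedKey = alicePrivate.exchange(bobPrivate.public_key())
derivedKey = HKDF(
    algorithm=hashes.SHA256(), length=32, salt=salt, info=b"handshake data"
).derive(sharedKey)
fernetKey = base64.urlsafe_b64encode(derivedKey)

# encrypt()/decrypt() wrap a msgpack'd dict in a Fernet token
token = Fernet(fernetKey).encrypt(msgpack.dumps({"hello": "piermesh"}))
print(msgpack.loads(Fernet(fernetKey).decrypt(token)))
```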
src/ui.py (23 lines changed)
@@ -1,5 +1,5 @@
 from textual.app import App, ComposeResult
-from textual.widgets import Log, Label, Footer, Header, ProgressBar
+from textual.widgets import Log, Label, Footer, Header, ProgressBar, Input, Button
 from textual.binding import Binding
 from textual.containers import Horizontal, Vertical
 import sys, os
@@ -24,6 +24,7 @@ class TUI(App):
     Whether the TUI has been killed
     """

+    todo = []
     visibleLogo = True
     nodeOb = None
     done = False
@@ -70,7 +71,15 @@ class TUI(App):
         Load the ascii art for display on the left label
         """
         yield Header(icon="P")
-        yield Label(ascii, classes="largeLabel", name="logo", id="logo")
+        yield Vertical(
+            Label(ascii, classes="largeLabel", name="logo", id="logo"),
+            Label("Add/set pre shared key for node\n"),
+            Label("Node ID:"),
+            Input(placeholder="000000", type="integer", max_length=6, name="pskNodeID", id="pskNodeID")
+            Label("PSK:"),
+            Input(type="text", max_length=6, name="psk", id="psk")
+            Button("Add/set PSK", name="addPSK", id="addPSK"),
+        )
         yield Vertical(
             Log(auto_scroll=True, classes="baseLog"),
             Label("CPU usage:", name="cpul", id="cpul"),
@@ -79,6 +88,16 @@ class TUI(App):
         )
         yield Footer()

+    def on_button_pressed(self, event: Button.Pressed) -> None:
+        if event.button.id == "addPSK":
+            self.todo.append({
+                "action": "addPSK",
+                "data": {
+                    "nodeID": self.query_one("#pskNodeID").value.zpad(6),
+                    "PSK": self.query_one("#PSK").value
+                }
+            })
+
     def do_write_line(self, logLine: str):
         """
         Write line to the logs panel