Prototype cleanup 1
This commit is contained in:
parent 66f3cbe401
commit 46a9cc3ee9
@@ -6,13 +6,18 @@

<a id="module-Components.hopper"></a>

-### Components.hopper.get(url: str, params=None)
+### Components.hopper.downloadFile(url, text=True, mimeType=None)
+
+Download resource from url and convert it to text or a data url
+
+### Components.hopper.get(url: str, params=None, followTags=None)

http/s get request

* **Parameters:**
  * **url** (*str*)
  * **params** – Requests (library) parameters
+  * **followTags** – None or list of tags to download the src/href from

### Components.hopper.post(url: str, params=None)
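To make the hopper signatures in this hunk concrete, here is a minimal sketch of what `get` with `followTags` and `downloadFile` could look like on top of the requests library; BeautifulSoup and the exact return shapes are assumptions for illustration, not the project's implementation.

```python
# Illustrative sketch only: mirrors the documented get(url, params=None, followTags=None)
# and downloadFile(url, text=True, mimeType=None) signatures, not Components.hopper itself.
import base64

import requests
from bs4 import BeautifulSoup  # assumption: any HTML parser would do


def get(url: str, params=None, followTags=None):
    """http/s get request, optionally collecting src/href targets of followTags."""
    response = requests.get(url, params=params)
    result = {"url": url, "status": response.status_code, "body": response.text}
    if followTags:
        soup = BeautifulSoup(response.text, "html.parser")
        linked = []
        for tag in soup.find_all(followTags):
            target = tag.get("src") or tag.get("href")
            if target:
                linked.append(target)
        result["linked"] = linked
    return result


def downloadFile(url, text=True, mimeType=None):
    """Download resource from url and convert it to text or a data url."""
    response = requests.get(url)
    if text:
        return response.text
    mimeType = mimeType or response.headers.get("Content-Type", "application/octet-stream")
    encoded = base64.b64encode(response.content).decode("utf-8")
    return f"data:{mimeType};base64,{encoded}"
```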
@@ -0,0 +1,16 @@
+<a id="context"></a>
+
+# Context
+
+### *class* Config.Context.Context(subsets: dict = {}, \*\*kwargs)
+
+Generic context data structure, currently subclassed for use in filters, see Sponge/Protocols/Yellow.py
+
+[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Config/Context.py)
+
+#### ctx
+
+Dictionary of context values
+
+* **Type:**
+  dict
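As a rough illustration of the Context idea (a dictionary seeded from `subsets` with keyword arguments merged in), a minimal sketch follows; the real Config.Context.Context likely adds more behaviour.

```python
# Minimal sketch of a generic context data structure; not the actual
# Config.Context.Context implementation, just the documented idea.
class Context:
    def __init__(self, subsets: dict = None, **kwargs):
        # ctx: dictionary of context values
        self.ctx = dict(subsets or {})
        self.ctx.update(kwargs)

    def __getitem__(self, key):
        return self.ctx[key]

    def __setitem__(self, key, value):
        self.ctx[key] = value


# Usage: filters can carry arbitrary per-message state.
parseState = Context(subsets={"packetsID": 42}, gotHead=False)
parseState["gotHead"] = True
```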
@@ -1,22 +1,11 @@
-<a id="whalesong-diffie-hellman-ephemeral-fernet-based-encryption"></a>
+<a id="whalesong"></a>

-# WhaleSong: Diffie hellman ephemeral Fernet based encryption
+# WhaleSong

-### *class* Cryptography.WhaleSong.DHEFern(cache, nodeNickname, cLog)
+### *class* Cryptography.WhaleSong.Transport(cache, nodeNickname, daisyCryptography, psk)

[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Cryptography/WhaleSong.py)

-#### cLog
-
-Method reference to run.Node.cLog so we can log to the ui from here
-
-#### loadedParams
-
-In memory representations of cryptography parameters
-
-* **Type:**
-  dict

#### loadedKeys

In memory representations of cryptography keys
@@ -36,7 +25,7 @@ Name of node for isolating configs when running multiple nodes

Daisy cache for use in storing cryptography information

* **Type:**
-  Components.daisy.Cache
+  [Daisy.Cache.Cache](/PierMesh/piermesh/src/branch/main/docs/Daisy/Cache.md#Daisy.Cache.Cache)

#### publicKey
@@ -46,20 +35,52 @@ Public key for node

Private key for node

-#### checkInMem(store: str, nodeID: str)
+#### daisyCryptography

-Check if parameters or keys are loaded for node of nodeID
+Record cryptography reference

-* **Parameters:**
-  **store** (*str*) – Whether to check loaded keys or parameters
+* **Type:**
+  [Daisy.CryptographyUtil.SteelPetal](/PierMesh/piermesh/src/branch/main/docs/Daisy/CryptographyUtil.md#Daisy.CryptographyUtil.SteelPetal)

-#### decrypt(data, nodeID: str)
+#### addPeerEphemeralKey(onodeID, peerEphemeralKey: bytes)

-Decrypt bytes and return either str or dict (TODO: Check whether to msgpack load)
+Add a peer node’s ephemeral key for session encryption

-#### encrypt(data, nodeID: str, isDict: bool = True)
+onodeID
+: Node identifier

-Do Fernet encryption
+peerEphemeralKey: bytes
+: Serialized ephemeral key
+
+#### addPublickey(onodeID, publicKey, forSelf: bool = False)
+
+Add a public key for a given node including this one
+
+onodeID
+: Node identifier
+
+publicKey
+: Public key to add
+
+forSelf: bool
+: Whether to add key for this node
+
+#### decrypt(data, onodeID: str, nonce, tag)
+
+Decrypt bytes and return either str or dict depending on result
+
+onodeID: str
+: Node identifier
+
+nonce
+: Encryption nonce
+
+tag
+: Encryption tag
+
+#### encrypt(data, nodeID: str, isDict: bool = True, pskEncrypt=False)
+
+Encrypt given data with AES GCM

data
: Either bytes or dict to encrypt
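addPeerEphemeralKey above, together with the genOurEphemeralKey/sessionSetup/kdf methods in the next hunk, reads like an ephemeral Diffie-Hellman handshake. A hedged sketch of that pattern, assuming X25519 plus HKDF from the `cryptography` package (the real Transport class may use different primitives and key storage):

```python
# Sketch of an ephemeral key exchange of the kind the methods above describe.
# Assumption: X25519 + HKDF via the `cryptography` package; the real
# Cryptography.WhaleSong.Transport may derive keys differently.
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric.x25519 import X25519PrivateKey, X25519PublicKey
from cryptography.hazmat.primitives.kdf.hkdf import HKDF

# Each side generates its own ephemeral key for the session
ourEphemeralKey = X25519PrivateKey.generate()
ourEphemeralPublicBytes = ourEphemeralKey.public_key().public_bytes(
    encoding=serialization.Encoding.Raw,
    format=serialization.PublicFormat.Raw,
)


def sessionKeyFrom(peerEphemeralKey: bytes) -> bytes:
    """peerEphemeralKey: serialized ephemeral key received from the peer."""
    sharedSecret = ourEphemeralKey.exchange(X25519PublicKey.from_public_bytes(peerEphemeralKey))
    # kdf step: stretch the shared secret into a fixed-size session key
    return HKDF(algorithm=hashes.SHA256(), length=32, salt=None, info=b"piermesh-session").derive(sharedSecret)
```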
@@ -67,53 +88,42 @@ data

isDict: bool
: Whether data is a dictionary

-#### genKeyPair(paramsOverride=False, setSelf: bool = True)
+pskEncrypt: bool
+: Whether to encrypt with pre-shared key

-Generate public and private keys from self.params (TODO: Gen from passed params)
+#### genOurEphemeralKey(onodeID)

-paramsOverride
-: False or parameters to use (TODO)
+Generate ephemeral key for session encryption with given node

-setSelf: bool
-: Whether to set self.privateKey and self.publicKey
+#### genStaticKey(onodeID)

-#### genParams()
+Generate static key for session encryption with given node

-Generate Diffie Hellman parameters
+#### generateSessionKey(onodeID)

-#### getParamsBytes()
+Generate session key for transport encryption

-Get bytes encoded from self.parameters (TODO: Encode from store)
+onodeID
+: Node identifier

-#### getRecord(store: str, key: str)
+#### getRecord(store: str, key: str, ephemeral=False)

Get record from store: store with key: key

-#### getSalt()
-
-Get random salt
-
#### initStore(store: str)

Initialize store: store

-#### keyDerive(pubKey: bytes, salt: bytes, nodeID: str, params: bytes)
+#### kdf(bytesX)

-Derive shared key using Diffie Hellman
+Key derivation function

-pubKey: bytes
-: Public key
+#### sessionSetup(onodeID, peerEphemeralKey: bytes)

-nodeID: str
-: PierMesh node ID
+Set up transport encryption session

-params: bytes
-: Encryption parameters
+onodeID
+: Node identifier

-#### loadParamBytes(pemBytes: bytes)
+peerEphemeralKey: bytes
+: Serialized ephemeral key
-
-Load parameters to self.params from given bytes (TODO: Load from store)
-
-#### loadRecordToMem(store: str, nodeID: str)
-
-Load record of nodeID from store to either keys or pameters
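encrypt/decrypt above are documented as AES GCM with a separate nonce and tag. A minimal round-trip sketch of that pattern, assuming PyCryptodome for AES-GCM and msgpack for the isDict case (illustrative only, not the Transport implementation):

```python
# Illustrative AES-GCM round trip matching the documented encrypt(data, ..., isDict)
# and decrypt(data, ..., nonce, tag) shape. Assumptions: PyCryptodome and msgpack;
# the real Transport wiring (per-node session keys, PSK fallback) is not shown.
import msgpack
from Crypto.Cipher import AES
from Crypto.Random import get_random_bytes

sessionKey = get_random_bytes(32)  # stand-in for a derived session key


def encrypt(data, isDict: bool = True):
    raw = msgpack.dumps(data) if isDict else data
    cipher = AES.new(sessionKey, AES.MODE_GCM)
    ciphertext, tag = cipher.encrypt_and_digest(raw)
    return ciphertext, cipher.nonce, tag


def decrypt(data: bytes, nonce: bytes, tag: bytes):
    cipher = AES.new(sessionKey, AES.MODE_GCM, nonce=nonce)
    raw = cipher.decrypt_and_verify(data, tag)
    try:
        return msgpack.loads(raw)  # dict payloads come back as dicts
    except Exception:
        return raw  # otherwise return the raw bytes


ciphertext, nonce, tag = encrypt({"hello": "mesh"})
assert decrypt(ciphertext, nonce, tag) == {"hello": "mesh"}
```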
@@ -2,19 +2,20 @@

# Daisy based cache

-### *class* Daisy.Cache.Cache(filepaths=None, cacheFile=None, path: str = 'daisy', walk: bool = False, isCatch: bool = False)
+### *class* Daisy.Cache.Cache(daisyCryptography, filepaths=None, cacheFile=None, path: str = 'daisy', walk: bool = False)

-In memory collection of Daisy records
+In memory collection of Daisy records, provides a search functionality currently utilized by Daisy.Catch.Catch

[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Daisy/Cache.py)

-#### create(path: str, data: dict)
+#### create(path: str, data: dict, remote=False)

Create new record

* **Parameters:**
  * **path** (*str*) – Path to create record at
  * **data** (*dict*) – Data to populate record with
+  * **remote** (*bool*) – Whether this is a reference to a distributed file (not implemented yet)

#### get(path: str)
@@ -29,10 +30,10 @@ Reload from disk to memory

#### search(keydict: dict, strict: bool = True)

-Search cache for record for records with values
+Search cache for record for records with keys and values matching those
+in the keydict

keydict: dict
-: Values to search for

strict: bool
-: Whether to require values match
+: Whether to require all keys/values match
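A small sketch of the keydict matching that search describes, with strict requiring every key/value pair to match and non-strict accepting any match; records are plain dicts here rather than Daisy objects, so this is only the matching logic.

```python
# Sketch of keydict matching as documented for Cache.search / Index.search.
# Not the real Daisy code; records are plain dicts here for illustration.
def search(records: dict, keydict: dict, strict: bool = True):
    check = all if strict else any
    return [
        path
        for path, record in records.items()
        if check(record.get(key) == value for key, value in keydict.items())
    ]


records = {
    "daisy/a": {"proto": "catch", "head": "docs"},
    "daisy/b": {"proto": "bubble", "head": "docs"},
}
print(search(records, {"proto": "catch", "head": "docs"}, strict=True))   # ['daisy/a']
print(search(records, {"proto": "catch", "head": "docs"}, strict=False))  # ['daisy/a', 'daisy/b']
```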
@@ -2,7 +2,7 @@

# Daisy Catch cache

-### *class* Daisy.Catch.Catch(path: str = 'catch', filepaths=None, catchFile=None, walk: bool = False)
+### *class* Daisy.Catch.Catch(daisyCryptography, path: str = 'catch', filepaths=None, catchFile=None, walk: bool = False)

Sub class of Cache for handling catchs

@@ -10,7 +10,7 @@ Sub class of Cache for handling catchs

[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Daisy/Catch.py)

-#### get(head: str, tail: str, fins=None)
+#### get(head: str, body: str, fins=None)

Get catch by pieces
@@ -0,0 +1,7 @@
+<a id="credential"></a>
+
+# Credential
+
+### *class* Daisy.Credential.Credential(nodeNickname, credentialName, extension, daisyCryptography)
+
+Currently unused credential class, will be fleshed out for credentialed access to the web ui

@@ -0,0 +1,28 @@
+<a id="cryptographyutil"></a>
+
+# CryptographyUtil
+
+### *class* Daisy.CryptographyUtil.SteelPetal(key: str, nonce=None, testData=None)
+
+Cryptography utility for encrypting files
+
+#### decrypt(data: bytes)
+
+Decrypt encrypted binary data
+
+data: bytes
+: Data to decrypt
+
+#### encrypt(data: bytes)
+
+Encrypt binary data
+
+data: bytes
+: Data to encrypt
+
+#### pad(key: str)
+
+Pad key to make it usable
+
+key: str
+: User’s plain text key
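SteelPetal's pad/encrypt/decrypt describe password-based symmetric file encryption. A hedged sketch of that idea, assuming PBKDF2 stretching into a Fernet key; the class name FilePetal and the derivation details are illustrative, not the real Daisy.CryptographyUtil.SteelPetal.

```python
# Sketch of password-based file encryption in the spirit of SteelPetal.
# Assumptions: PBKDF2-HMAC-SHA256 to stretch the user's key and Fernet for the
# symmetric cipher; the real implementation may differ.
import base64
import os

from cryptography.fernet import Fernet
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC


class FilePetal:
    def __init__(self, key: str, nonce: bytes = None):
        self.nonce = nonce or os.urandom(16)  # acts as the salt here
        self.fernet = Fernet(self.pad(key))

    def pad(self, key: str) -> bytes:
        # Stretch the user's plain text key into a usable 32-byte urlsafe key
        kdf = PBKDF2HMAC(algorithm=hashes.SHA256(), length=32, salt=self.nonce, iterations=480000)
        return base64.urlsafe_b64encode(kdf.derive(key.encode("utf-8")))

    def encrypt(self, data: bytes) -> bytes:
        return self.fernet.encrypt(data)

    def decrypt(self, data: bytes) -> bytes:
        return self.fernet.decrypt(data)


petal = FilePetal("hunter2")
assert petal.decrypt(petal.encrypt(b"record bytes")) == b"record bytes"
```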
@@ -4,7 +4,7 @@

# Daisy

-### *class* Daisy.Daisy.Daisy(filepath: str, templates: dict = {}, template: bool = False, prefillDict: bool = False)
+### *class* Daisy.Daisy.Daisy(filepath: str, daisyCryptography, templates: dict = {}, template: bool = False, prefillDict: bool = False, remote=False)

Base class for Daisy data representation
@@ -33,6 +33,13 @@ Get record dictionary from memory

* **Return type:**
  dict

+#### json_to_msg(path: str)
+
+Convert json at the path plus .json to a msgpack binary
+
+* **Parameters:**
+  **path** (*str*) – Path to json minus the extension

#### read(decrypt: bool = False, decryptKey=False)

Read record from disk to memory
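json_to_msg describes re-encoding `<path>.json` as a msgpack binary at `<path>`; a minimal standalone sketch of that conversion (illustrative, not the Daisy method itself):

```python
# Sketch of the json -> msgpack conversion json_to_msg describes: read
# <path>.json and write the msgpack encoding to <path>. Illustrative only.
import json

import msgpack


def json_to_msg(path: str) -> None:
    with open(path + ".json", "r", encoding="utf-8") as f:
        data = json.load(f)
    with open(path, "wb") as f:
        f.write(msgpack.dumps(data))
```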
@@ -47,7 +54,7 @@ Lists contents of directory if object is a directory, otherwise return None

#### write(override=False, encrypt: bool = False, encryptKey=None, recur: bool = False)

-Write record to disk
+Write record to disk, note: use override with updated record to update record

* **Parameters:**
  * **override** – Either false or a dictionary of values to set on the record
@@ -0,0 +1,25 @@
+<a id="index"></a>
+
+# Index
+
+### *class* Daisy.Index.Index(nodeNickname: str, daisyCryptography, prefill: list = [], indexedFields: list = [], autoIndex: bool = True)
+
+A searchable index of records, this is currently only half implemented
+but works enough to hold our remote catch index
+
+#### addEntry(entry: dict)
+
+Add a record to the index
+
+entry: dict
+: Record to add to the index
+
+#### search(keydict: dict, strict: bool = True)
+
+Search index for record for records with values
+
+keydict: dict
+: Keys/Values to search for
+
+strict: bool
+: Whether to require all keys/values match

@@ -0,0 +1,13 @@
+<a id="ref"></a>
+
+# Ref
+
+### *class* Daisy.Ref.Ref(metadata: dict, path: str, remoteNodeID: str)
+
+Reference to a remote record
+
+metadata: dict
+: Data to fill record with, should only be metadata
+
+path: str
+: Where to store data locally
@@ -1,16 +0,0 @@
-<a id="soil-daisy-signal-management"></a>
-
-# Soil: Daisy signal management
-
-### *class* Daisy.Soil.Compound(cache, isCatch: bool = False)
-
-File system watcher to propagate disk changes
-
-[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Daisy/Soil.py)
-
-#### on_any_event(event)
-
-Called when a CRUD operation is performed on a record file
-
-* **Parameters:**
-  **event** – Event object provided by watchdog
@@ -2,12 +2,39 @@

# Store: Daisy key value store

-### *class* Daisy.Store.Store(store: str, path: str, nodeNickname: str)
+### *class* Daisy.Store.Store(store: str, path: str, nodeNickname: str, daisyCryptography)

Key value store

[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Daisy/Store.py)

-#### getRecord(key: str)
+#### epehemeral

-#### update(entry: str, data, recur: bool = True)
+Memory only records
+
+* **Type:**
+  dict
+
+#### createEmpty(key: str)
+
+* **Parameters:**
+  * **key** (*str*)
+  * **key** – Key to create empty record at
+
+#### getRecord(key: str, ephemeral=False)
+
+Get record at key
+
+* **Parameters:**
+  * **key** (*str*)
+  * **ephemeral** (*bool*) – Whether key is only in memory, used for session cryptography credentials currently
+
+#### update(entry: str, data, recur: bool = True, write=True)
+
+Update given record
+
+* **Parameters:**
+  * **entry** (*str*) – Key to update record of
+  * **data** – Data to update record with
+  * **recur** (*bool*) – Whether to iterate over data
+  * **write** (*bool*) – Whether record is ephemeral
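The ephemeral/write flags on getRecord and update suggest a split between memory-only records and records written through to disk; a simplified sketch of that split (MiniStore is a stand-in, not the Daisy-backed Store):

```python
# Sketch of a key value store with memory-only (ephemeral) records, mirroring
# the documented getRecord(key, ephemeral=...) / update(..., write=...) split.
import json


class MiniStore:
    def __init__(self, path: str):
        self.path = path
        self.records = {}    # records persisted to disk
        self.ephemeral = {}  # memory only records (e.g. session cryptography credentials)

    def getRecord(self, key: str, ephemeral: bool = False):
        return (self.ephemeral if ephemeral else self.records).get(key)

    def update(self, entry: str, data, write: bool = True):
        if write:
            self.records[entry] = data
            with open(self.path, "w", encoding="utf-8") as f:
                json.dump(self.records, f)
        else:
            self.ephemeral[entry] = data
```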
@@ -2,7 +2,7 @@

# Header packet: Metadata packet

-### *class* Packets.HeaderPacket.Header(packetsID: int, packetCount: int, sender: int, senderDisplayName: int, recipient: int, recipientNode: int, subpacket: bool = False, wantFullResponse: bool = False, packetsClass: int = 0, pAction: int = -1)
+### *class* Packets.HeaderPacket.Header(packetsID: int, packetCount: int, sender: int, senderDisplayName: int, sourceNode: int, recipient: int, recipientNode: int, wantFullResponse: bool = False, packetsClass: int = 0, pAction: int = -1, target=True)

Metadata packet for messages

@@ -52,7 +52,7 @@ Whether a response should be sent when the message completes reception (TODO)

#### pAction

-3 digit (maximum) pAction ID for mapping precise actions within a protocol (TODO)
+3 digit (maximum) pAction ID for mapping precise actions within a protocol

* **Type:**
  int

@@ -61,6 +61,6 @@ Whether a response should be sent when the message completes reception (TODO)

Dump packet to msgpack encoded binary for transmission

-#### usePreset(path: str)
+#### usePreset(path: str, daisyCryptography)

-Add preset fields to the packet
+Add preset fields to the packet, currently unused
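dump above is documented as msgpack-encoding the header metadata for transmission; a trimmed sketch of that step (the field list is shortened and MiniHeader is illustrative, not the full Header class):

```python
# Sketch of dumping header metadata to a msgpack binary for transmission,
# as the Header.dump docs describe. Field list trimmed; not the real class.
import msgpack


class MiniHeader:
    def __init__(self, packetsID: int, packetCount: int, sender: int, recipient: int,
                 recipientNode: int, packetsClass: int = 0, pAction: int = -1):
        self.fields = {
            "packetsID": packetsID,
            "packetCount": packetCount,
            "sender": sender,
            "recipient": recipient,
            "recipientNode": recipientNode,
            "packetsClass": packetsClass,
            "pAction": pAction,
        }

    def dump(self) -> bytes:
        # msgpack keeps the metadata compact enough for LoRa-sized payloads
        return msgpack.dumps(self.fields)


wire = MiniHeader(1, 3, 100, 200, 300, packetsClass=0, pAction=1).dump()
assert msgpack.loads(wire)["packetCount"] == 3
```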
@@ -0,0 +1,7 @@
+<a id="bubble-bubble"></a>
+
+# bubble.Bubble
+
+### *class* Packets.Messages.Protocols.bubble.Bubble.Bubble(sender, senderID, sourceNode, recipient, recipientNode, cryptographyInfo, data)
+
+Send data from peer to peer

@@ -0,0 +1,7 @@
+<a id="catch-indexsync"></a>
+
+# catch.IndexSync
+
+### *class* Packets.Messages.Protocols.catch.IndexSync.IndexSync(sender, senderID, sourceNode, recipient, recipientNode, cryptographyInfo, index, target=False)
+
+Sync indices of Catchs across nodes

@@ -0,0 +1,7 @@
+<a id="catch-request"></a>
+
+# catch.Request
+
+### *class* Packets.Messages.Protocols.catch.Request.CatchRequest(sender, senderID, sourceNode, recipient, recipientNode, cryptographyInfo, head, body, fins, pskEncrypt=False)
+
+Request Catch (website) from another node

@@ -0,0 +1,7 @@
+<a id="catch-response"></a>
+
+# catch.Response
+
+### *class* Packets.Messages.Protocols.catch.Response.CatchResponse(sender, senderID, sourceNode, recipient, recipientNode, cryptographyInfo, html, pskEncrypt=False)
+
+Send local Catch (website) to user who requested it

@@ -0,0 +1,7 @@
+<a id="cryptography-handshake"></a>
+
+# cryptography.Handshake
+
+### *class* Packets.Messages.Protocols.cryptography.Handshake.Handshake(sender, senderID, recipient, recipientNode, cryptographyInfo, onodeID, sourceNode)
+
+Provides the ephemeral key for session encryption

@@ -0,0 +1,7 @@
+<a id="hopper-request"></a>
+
+# hopper.Request
+
+### *class* Packets.Messages.Protocols.hopper.Request.HopperRequest(sender, senderID, sourceNode, recipient, recipientNode, url: str, params: dict, method: str, cryptographyInfo)
+
+Proxy request to main internet from remote node

@@ -0,0 +1,7 @@
+<a id="hopper-response"></a>
+
+# hopper.Response
+
+### *class* Packets.Messages.Protocols.hopper.Response.HopperResponse(sender, senderID, sourceNode, recipient, recipientNode, response, cryptographyInfo)
+
+Send proxied request back to requester

@@ -0,0 +1,7 @@
+<a id="map-announce"></a>
+
+# map.Announce
+
+### *class* Packets.Messages.Protocols.map.Announce.AnnounceMessage(sender, senderID, sourceNode, cryptographyInfo, mapping)
+
+Announce the network map details and public key of the node for discovery
@@ -2,7 +2,7 @@

# Packet: Base packet

-### *class* Packets.Packet.Packet(data: bytes, packetsID: int = -1, packetNumber=False, packetCount: int = 1, packetsClass: int = -1)
+### *class* Packets.Packet.Packet(data: bytes, packetsID: int = -1, packetNumber=False, packetCount: int = 1, packetsClass: int = -1, primaryMessage=None)

Base class for Packets

@@ -2,6 +2,6 @@

# SubMessage: Additional data for larger messages

-### *class* Packets.SubMessage.SubMessage
+### *class* Packets.SubMessage.SubMessage(sender, senderID, sourceNode, recipient, recipientNode, cryptographyInfo, protocolID, pAction, data, target=True, primaryMessage=None)

-TODO
+SubMessage to a primary message, enables us to send more/dynamic data

@@ -2,6 +2,6 @@

# SubPacket: Packets for submessages

-### *class* Packets.SubPacket.SubPacket
+### *class* Packets.SubMessage.SubMessage(sender, senderID, sourceNode, recipient, recipientNode, cryptographyInfo, protocolID, pAction, data, target=True, primaryMessage=None)

-TODO
+SubMessage to a primary message, enables us to send more/dynamic data
@@ -0,0 +1,59 @@
+<a id="services-action"></a>
+
+# Services.Action
+
+### *class* Services.Action.Action(action, data, sender=None, senderID=None, sourceNode=None, recipient=None, recipientNode=None)
+
+Generic action class for triggering actions from sub processes on the main thread
+
+[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Services/Action.py)
+
+#### action
+
+Action to run
+
+* **Type:**
+  str
+
+#### data
+
+Data to pass to action
+
+* **Type:**
+  dict
+
+#### sender
+
+Sender identifier
+
+* **Type:**
+  str
+
+#### senderID
+
+Sender second level identifier
+
+* **Type:**
+  str
+
+#### sourceNode
+
+Sending node
+
+* **Type:**
+  str
+
+#### recipient
+
+Peer identifier to route to
+
+* **Type:**
+  str
+
+#### recipientNode
+
+Intended destination node identifier
+
+#### getAction()
+
+#### getData()
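Action is essentially a named action plus a data payload handed from sub processes to the main thread; a minimal sketch of that shape (MiniAction is illustrative, not the real Services.Action.Action):

```python
# Minimal sketch of a generic action record queued from sub processes to the
# main thread, mirroring the documented attributes and getAction/getData.
class MiniAction:
    def __init__(self, action: str, data: dict, sender=None, senderID=None,
                 sourceNode=None, recipient=None, recipientNode=None):
        self.action = action            # action to run, e.g. "sendToPeer"
        self.data = data                # data to pass to the action
        self.sender = sender
        self.senderID = senderID
        self.sourceNode = sourceNode
        self.recipient = recipient
        self.recipientNode = recipientNode

    def getAction(self) -> str:
        return self.action

    def getData(self) -> dict:
        return self.data


todo = []
todo.append(MiniAction("sendToPeer", {"peerID": "abc", "data": "hi"}))
```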
@@ -36,7 +36,7 @@ Map of PierMesh node IDs to MeshTastic node IDs

* **Type:**
  dict

-#### addLookup(onodeID: str, mnodeID: str)
+#### *async* addLookup(onodeID: str, mnodeID: str)

Adds node to lookup

@@ -92,3 +92,11 @@ Import map from path

#### render(pathPrefix: str = '')

Render outer and inner network map to disk at the given path prefix

+#### syncaddLookup(onodeID: str, mnodeID: str)
+
+Adds node to lookup
+
+* **Parameters:**
+  * **onodeID** (*str*) – Internal nodeID
+  * **mnodeID** (*str*) – MeshTastic nodeID
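The async addLookup plus the new syncaddLookup wrapper suggest the same lookup update exposed to both async and synchronous callers; a small sketch of that pairing (MiniNetwork is a stand-in, not the real Siph.map.Network, which also maintains the network graph):

```python
# Sketch of the PierMesh-ID -> MeshTastic-ID lookup with both an async adder
# and a synchronous wrapper, as the addLookup/syncaddLookup docs suggest.
import asyncio


class MiniNetwork:
    def __init__(self):
        self.lookup = {}  # map of PierMesh node IDs to MeshTastic node IDs

    async def addLookup(self, onodeID: str, mnodeID: str):
        self.lookup[onodeID] = mnodeID

    def syncaddLookup(self, onodeID: str, mnodeID: str):
        # Same operation for callers that are not running in the event loop
        self.lookup[onodeID] = mnodeID


network = MiniNetwork()
asyncio.run(network.addLookup("onode-1", "!meshtastic-1"))
network.syncaddLookup("onode-2", "!meshtastic-2")
```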
@@ -2,22 +2,18 @@

# serve: Web UI server

-### *class* Splash.serve.Server(transceiver, catch, onodeID, network, cLog)
+### *class* Splash.serve.Server(transceiver, catch, onodeID, network, cryptographyInfo, remoteCatchIndex, cache)

Web server that serves the web ui and provides web to node communication

[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/src/Sponge/serve.py)

-#### cLog
+#### transceiver

-Reference to run.Node.cLog for logging
+Reference to our Transceiver.Transceiver.Transceiver instance

-#### transmitter
-
-Reference to our Transmission.transmission.Transmitter instance
-
* **Type:**
-  Transmission.transmission.Transmitter
+  [Transceiver.Transceiver.Transceiver](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver)

#### network

@@ -51,6 +47,10 @@ Reference to our Catch Cache instance to pull from for serving Catchs

* **Type:**
  [Daisy.Catch.Catch](/PierMesh/piermesh/src/branch/main/docs/Daisy/Catch.md#Daisy.Catch.Catch)

-#### *async* sendToPeer(peerID: str, data: str)
+#### *async* getPSKs()
+
+Get all PSKs for display
+
+#### *async* sendToPeer(peerID: str, data: str, target: str)

Send data to Websocket of peer with peerID
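sendToPeer pushes data to the websocket registered for a peer, with target presumably telling the browser side where to route the payload; a sketch of that lookup-and-send step, assuming a `{peerID: websocket}` map and an async send_text-style method (the web framework and message format are assumptions):

```python
# Sketch of routing data to a peer's websocket, as sendToPeer describes.
# Assumption: the server keeps a {peerID: websocket} map and the websocket
# object exposes an async send_text(...) method; framework details will vary.
class MiniServer:
    def __init__(self):
        self.peerSockets = {}  # peerID -> connected websocket

    async def sendToPeer(self, peerID: str, data: str, target: str = "bubble"):
        ws = self.peerSockets.get(peerID)
        if ws is None:
            return False  # peer not connected to the web ui
        # target lets the browser side dispatch the payload to the right widget
        await ws.send_text(f"{target}:{data}")
        return True
```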
@@ -0,0 +1,86 @@
+<a id="sponge-protocols-yellow"></a>
+
+# Sponge.Protocols.Yellow
+
+### *class* Sponge.Protocols.Yellow.YCTX(packetsID, packetCount, pAction, todo, cryptographyInfo, sourceNode, subMessage=False, subMessages={}, submessagesIDs=[], eData=None)
+
+Context data structure for message parsing
+
+### *class* Sponge.Protocols.Yellow.Yellow(yctx: [YCTX](#Sponge.Protocols.Yellow.YCTX))
+
+Message parser that’s subclassed to easily make parsers for specific protocols
+
+#### yctx
+
+Message parsing context
+
+* **Type:**
+  [YCTX](#Sponge.Protocols.Yellow.YCTX)
+
+#### message
+
+#### submessages
+
+Dictionary of submessages
+
+* **Type:**
+  dict
+
+#### submessagesIDs
+
+List of required submessages
+
+* **Type:**
+  list
+
+#### finishedSubmessages
+
+Dictionary of finished submessages
+
+* **Type:**
+  dict
+
+#### dataOrder
+
+List that maps packets based on their received order
+
+* **Type:**
+  list
+
+#### data
+
+Data of primary message
+
+* **Type:**
+  list
+
+#### nonce
+
+Cryptography artifact for decrypting message
+
+#### tag
+
+Cryptography artifact for decrypting message
+
+#### gotHead
+
+Whether we’ve gotten the head/header packet
+
+* **Type:**
+  bool
+
+#### todo
+
+Queue of actions to execute in the main loop
+
+#### checkComplete()
+
+#### *async* doAct(setpAction=False, repeatDataOnActions=[], subMessage=False)
+
+#### *async* dump()
+
+#### *async* id()
+
+#### pActions *= []*
+
+#### *async* processPacket(p, subMessage=False, rdoaoc=[])
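checkComplete presumably compares the packets received so far against packetCount and the required submessages against finishedSubmessages; a hedged sketch of that bookkeeping (MiniYellow is illustrative, not the real parser):

```python
# Sketch of completion bookkeeping in the spirit of Yellow.checkComplete:
# the primary message is done when every packet slot has arrived and all
# required submessages have finished. Illustrative only.
class MiniYellow:
    def __init__(self, packetCount: int, submessagesIDs=None):
        self.packetCount = packetCount
        self.dataOrder = []                  # packet numbers in received order
        self.data = {}                       # packetNumber -> payload fragment
        self.submessagesIDs = submessagesIDs or []
        self.finishedSubmessages = {}

    def addPacket(self, packetNumber: int, fragment: bytes):
        if packetNumber not in self.data:
            self.dataOrder.append(packetNumber)
            self.data[packetNumber] = fragment

    def checkComplete(self) -> bool:
        gotAllPackets = len(self.dataOrder) >= self.packetCount
        gotAllSubmessages = all(sid in self.finishedSubmessages for sid in self.submessagesIDs)
        return gotAllPackets and gotAllSubmessages
```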
@@ -0,0 +1,11 @@
+<a id="sponge-protocols-bubble"></a>
+
+# Sponge.Protocols.bubble
+
+### *class* Sponge.Protocols.bubble.Bubble(yctx: [YCTX](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.YCTX))
+
+Peer to peer protocol
+
+[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/src/Sponge/Protocols/bubble.py)
+
+#### pActions *= ['sendToPeer']*

@@ -0,0 +1,11 @@
+<a id="sponge-protocols-catch"></a>
+
+# Sponge.Protocols.catch
+
+### *class* Sponge.Protocols.catch.Catch(yctx: [YCTX](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.YCTX))
+
+Catch exchange protocol
+
+[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/catch.py)
+
+#### pActions *= ['sendCatch', 'routeCatch', 'syncIndex']*

@@ -0,0 +1,11 @@
+<a id="sponge-protocols-cryptography"></a>
+
+# Sponge.Protocols.cryptography
+
+### *class* Sponge.Protocols.cryptography.CryptographyFilter(yctx: [YCTX](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.YCTX))
+
+Cryptographic operations protocol
+
+[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/cryptography.py)
+
+#### pActions *= ['initCryptography']*

@@ -0,0 +1,11 @@
+<a id="sponge-protocols-hopper"></a>
+
+# Sponge.Protocols.hopper
+
+### *class* Sponge.Protocols.hopper.Hopper(yctx: [YCTX](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.YCTX))
+
+Internet inter(h)op protocol
+
+[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/hopper.py)
+
+#### pActions *= ['hop', 'routeHop']*

@@ -0,0 +1,13 @@
+<a id="sponge-protocols-map"></a>
+
+# Sponge.Protocols.map
+
+### *class* Sponge.Protocols.map.Map(yctx: [YCTX](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.YCTX))
+
+Network mapping protocol
+
+[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/map.py)
+
+#### pActions *= ['map', 'initCryptography']*
+
+#### process(message, isSubMessage=False)
@@ -2,43 +2,63 @@

# base: Primary filtering functionality

-### *class* Sponge.base.Filter(cache, onodeID, todo, cLog)
+### *class* Sponge.base.Filter(cache, onodeID, todo, cryptographyInfo)

Packet filtering orchestration

[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/base.py)

-cLog
-: Reference to run.Node.cLog for logging
-
-cache: Daisy.Cache.Cache
-: Reference to our Daisy Cache instance
-
-completed: list
-: List of completed messages IDs
-
-todo
-: Reference to list of actions to do in the Node
-
-onodeID
-: PierMesh node ID

#### cache

-Messages is temporary storage for unfinished messages
+Reference to our Daisy Cache instance
+
+* **Type:**
+  [Daisy.Cache.Cache](/PierMesh/piermesh/src/branch/main/docs/Daisy/Cache.md#Daisy.Cache.Cache)
+
+#### onodeID
+
+PierMesh node ID
+
+#### todo
+
+Reference to list of actions to do in the Node
+
+#### cryptographyInfo
+
+Cryptography instance for encrypting message
+
+* **Type:**
+  [Cryptography.WhaleSong.Transport](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport)
+
+#### messages
+
+Temporary storage for unfinished messages
+
+* **Type:**
+  dict
+
+#### submessages
+
+Temporary storage for unfinished submessages
+
+* **Type:**
+  dict
+
+#### completed
+
+List of finished message ids so we don’t reprocess messages
+
+* **Type:**
+  list

#### mCheck(payload: bytes)

Check if payload bytes are msgpack encoded, otherwise skip

-#### *async* protoMap(protocolID: int)
+#### *async* protoMap(protocolID: int, packetsID, packetCount, sourceNode, submessagesIDs=[], pAction=None)

Get protocol from protocol ID using the mlookup table

-#### *async* protoRoute(completeMessage: dict)
-
-Route message to proper protocol handler
-
#### selfCheck(packet)

Check if this is a self packet, if so skip
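protoMap resolves a protocol ID through the mlookup table; a self-contained sketch of that dispatch idea follows (the IDs and handler functions here are placeholders, not the project's real table):

```python
# Sketch of protocol-ID dispatch through a lookup table, as protoMap describes.
# Protocol IDs and handler names here are placeholders, not the real mlookup.
from typing import Callable, Dict


async def handleBubble(message: dict) -> None:
    print("peer to peer:", message)


async def handleCatch(message: dict) -> None:
    print("catch exchange:", message)


mlookup: Dict[int, Callable] = {
    1: handleBubble,
    2: handleCatch,
}


async def protoMap(protocolID: int):
    try:
        return mlookup[protocolID]
    except KeyError:
        raise KeyError(f"unknown protocol ID {protocolID}") from None
```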
@@ -46,31 +66,3 @@ Check if this is a self packet, if so skip

#### *async* sieve(packet)

Base filtering logic, takes a single MeshTastic packet

-<a id="protocols"></a>
-
-# Protocols
-
-#### *async* bubble.filter(recipient, recipientNode, onodeID, todo)
-
-Peer to peer protol
-
-[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/src/Sponge/Protocols/bubble.py)
-
-#### *async* catch.filter(recipient, recipientNode, todo)
-
-Catch exchange protocol
-
-[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/catch.py)
-
-#### *async* cryptography.filter(recipientNode, todo)
-
-Cryptographic operations protocol
-
-[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/cryptography.py)
-
-#### *async* map.filter(todo)
-
-Network mapping protocol
-
-[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/map.py)
@@ -2,7 +2,7 @@

# Transceiver: Layer 0 data transceiving

-### *class* Transceiver.Transceiver.Transceiver(device, filter, onodeID, cache, catch, cryptographyInfo, cLog)
+### *class* Transceiver.Transceiver.Transceiver(device, filter, onodeID, cache, catch, cryptographyInfo, network)

Handling LoRa transceiving

@@ -17,7 +17,7 @@ Reference to run.Node.cLog for logging

Cryptography instance for encrypting transmissions

* **Type:**
-  [Cryptography.WhaleSong.DHEFern](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern)
+  Cryptography.WhaleSong.DHEFern

#### filter

@@ -114,6 +114,7 @@ Checks if acknowldgement was received per packet and if not resends

#### responseCheck(packet)

On acknowldgement response set acks based on response
+TODO: Stop this being sent to sieve

#### send(packet, recipientNode=False)

@@ -122,6 +123,8 @@ Send individual packet

* **Parameters:**
  **recipientNode** – If set send to specified node

-#### *async* sendAnnounce()
+#### *async* sendAnnounce(dontRespond=False)

Send an announce packet (contains basic network mapping information) every so often so new nodes autoconnect

+#### *async* sendMessage(message: Message)
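responseCheck sets acknowledgement flags when a response comes back so the resend loop can skip acknowledged packets; a small sketch of that bookkeeping (the acks shape and packet fields are assumptions, not the real Transceiver internals):

```python
# Sketch of acknowledgement bookkeeping in the spirit of responseCheck:
# mark a packet as acknowledged when its response comes back, so the resend
# loop can skip it. The acks dict shape and packet layout are assumptions.
acks = {}  # packet id -> True once acknowledged


def send(packetID: int, payload: bytes):
    acks.setdefault(packetID, False)
    # ... hand payload to the radio here ...


def responseCheck(packet: dict):
    ackedID = packet.get("decoded", {}).get("requestId")
    if ackedID in acks:
        acks[ackedID] = True


def pendingResends():
    return [packetID for packetID, acked in acks.items() if not acked]
```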
docs/readme.md (182 lines changed)
@@ -11,7 +11,6 @@ sphinx-quickstart on Fri Jul 26 23:30:55 2024. -->

* [run: PierMesh service runner](/PierMesh/piermesh/src/branch/main/docs/run.md)
* [`Node`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node)
-* [`Node.toLog`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.toLog)
* [`Node.actions`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.actions)
* [`Node.todo`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.todo)
* [`Node.network`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.network)
@@ -20,29 +19,28 @@ sphinx-quickstart on Fri Jul 26 23:30:55 2024. -->

* [`Node.nodeInfo`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.nodeInfo)
* [`Node.onodeID`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.onodeID)
* [`Node.oTransceiver`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.oTransceiver)
-* [`Node.processed`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.processed)
* [`Node.proc`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.proc)
* [`Node.mTasks`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.mTasks)
-* [`Node.action_initNodeDH()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.action_initNodeDH)
+* [`Node.action_addPSK()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.action_addPSK)
-* [`Node.action_keyDeriveDH()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.action_keyDeriveDH)
+* [`Node.action_hop()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.action_hop)
+* [`Node.action_initCryptography()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.action_initCryptography)
* [`Node.action_map()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.action_map)
+* [`Node.action_routeCatch()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.action_routeCatch)
+* [`Node.action_routeHop()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.action_routeHop)
* [`Node.action_sendCatch()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.action_sendCatch)
* [`Node.action_sendToPeer()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.action_sendToPeer)
-* [`Node.cLog()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.cLog)
+* [`Node.action_syncIndex()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.action_syncIndex)
+* [`Node.fsInit()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.fsInit)
+* [`Node.main()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.main)
+* [`Node.monitor()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.monitor)
* [`Node.spongeListen()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.spongeListen)
-* [ui: TUI application](/PierMesh/piermesh/src/branch/main/docs/ui.md)
+* [tlog](/PierMesh/piermesh/src/branch/main/docs/tlog.md)
-* [`TUI`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI)
+* [`VHandler`](/PierMesh/piermesh/src/branch/main/docs/tlog.md#tlog.VHandler)
-* [`TUI.visibleLogo`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI.visibleLogo)
+* [`VHandler.tolog`](/PierMesh/piermesh/src/branch/main/docs/tlog.md#tlog.VHandler.tolog)
-* [`TUI.nodeOb`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI.nodeOb)
+* [`VHandler.emit()`](/PierMesh/piermesh/src/branch/main/docs/tlog.md#tlog.VHandler.emit)
-* [`TUI.done`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI.done)
+* [`VHandler.tolog`](/PierMesh/piermesh/src/branch/main/docs/tlog.md#id0)
-* [`TUI.CSS_PATH`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI.CSS_PATH)
+* [`logUI()`](/PierMesh/piermesh/src/branch/main/docs/tlog.md#tlog.logUI)
-* [`TUI.action_quitFull()`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI.action_quitFull)
+* [`runLogUI()`](/PierMesh/piermesh/src/branch/main/docs/tlog.md#tlog.runLogUI)
-* [`TUI.action_toggleFullscreen()`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI.action_toggleFullscreen)
-* [`TUI.compose()`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI.compose)
-* [`TUI.do_set_cpu_percent()`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI.do_set_cpu_percent)
-* [`TUI.do_set_mem()`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI.do_set_mem)
-* [`TUI.do_write_line()`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI.do_write_line)
-* [`TUI.on_mount()`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI.on_mount)
* [Network: Network map representation](/PierMesh/piermesh/src/branch/main/docs/Siph/map.md)
* [`Network`](/PierMesh/piermesh/src/branch/main/docs/Siph/map.md#Siph.map.Network)
* [`Network.omap`](/PierMesh/piermesh/src/branch/main/docs/Siph/map.md#Siph.map.Network.omap)
@@ -60,7 +58,9 @@ sphinx-quickstart on Fri Jul 26 23:30:55 2024. -->

* [`Network.getRoute()`](/PierMesh/piermesh/src/branch/main/docs/Siph/map.md#Siph.map.Network.getRoute)
* [`Network.mimport()`](/PierMesh/piermesh/src/branch/main/docs/Siph/map.md#Siph.map.Network.mimport)
* [`Network.render()`](/PierMesh/piermesh/src/branch/main/docs/Siph/map.md#Siph.map.Network.render)
+* [`Network.syncaddLookup()`](/PierMesh/piermesh/src/branch/main/docs/Siph/map.md#Siph.map.Network.syncaddLookup)
* [hopper: Small internet interop utilities](/PierMesh/piermesh/src/branch/main/docs/Components/hopper.md)
+* [`downloadFile()`](/PierMesh/piermesh/src/branch/main/docs/Components/hopper.md#Components.hopper.downloadFile)
* [`get()`](/PierMesh/piermesh/src/branch/main/docs/Components/hopper.md#Components.hopper.get)
* [`post()`](/PierMesh/piermesh/src/branch/main/docs/Components/hopper.md#Components.hopper.post)
* [Daisy based cache](/PierMesh/piermesh/src/branch/main/docs/Daisy/Cache.md)
@@ -73,55 +73,102 @@ sphinx-quickstart on Fri Jul 26 23:30:55 2024. -->

* [`Catch`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Catch.md#Daisy.Catch.Catch)
* [`Catch.get()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Catch.md#Daisy.Catch.Catch.get)
* [`Catch.sget()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Catch.md#Daisy.Catch.Catch.sget)
+* [Credential](/PierMesh/piermesh/src/branch/main/docs/Daisy/Credential.md)
+* [`Credential`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Credential.md#Daisy.Credential.Credential)
+* [CryptographyUtil](/PierMesh/piermesh/src/branch/main/docs/Daisy/CryptographyUtil.md)
+* [`SteelPetal`](/PierMesh/piermesh/src/branch/main/docs/Daisy/CryptographyUtil.md#Daisy.CryptographyUtil.SteelPetal)
+* [`SteelPetal.decrypt()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/CryptographyUtil.md#Daisy.CryptographyUtil.SteelPetal.decrypt)
+* [`SteelPetal.encrypt()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/CryptographyUtil.md#Daisy.CryptographyUtil.SteelPetal.encrypt)
+* [`SteelPetal.pad()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/CryptographyUtil.md#Daisy.CryptographyUtil.SteelPetal.pad)
* [Daisy](/PierMesh/piermesh/src/branch/main/docs/Daisy/Daisy.md)
* [`Daisy`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Daisy.md#Daisy.Daisy.Daisy)
* [`Daisy.filepath`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Daisy.md#Daisy.Daisy.Daisy.filepath)
* [`Daisy.msg`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Daisy.md#Daisy.Daisy.Daisy.msg)
* [`Daisy.get()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Daisy.md#Daisy.Daisy.Daisy.get)
+* [`Daisy.json_to_msg()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Daisy.md#Daisy.Daisy.Daisy.json_to_msg)
* [`Daisy.read()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Daisy.md#Daisy.Daisy.Daisy.read)
* [`Daisy.sublist()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Daisy.md#Daisy.Daisy.Daisy.sublist)
* [`Daisy.write()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Daisy.md#Daisy.Daisy.Daisy.write)
-* [Soil: Daisy signal management](/PierMesh/piermesh/src/branch/main/docs/Daisy/Soil.md)
+* [Index](/PierMesh/piermesh/src/branch/main/docs/Daisy/Index.md)
-* [`Compound`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Soil.md#Daisy.Soil.Compound)
+* [`Index`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Index.md#Daisy.Index.Index)
-* [`Compound.on_any_event()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Soil.md#Daisy.Soil.Compound.on_any_event)
+* [`Index.addEntry()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Index.md#Daisy.Index.Index.addEntry)
+* [`Index.search()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Index.md#Daisy.Index.Index.search)
+* [Ref](/PierMesh/piermesh/src/branch/main/docs/Daisy/Ref.md)
+* [`Ref`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Ref.md#Daisy.Ref.Ref)
* [Store: Daisy key value store](/PierMesh/piermesh/src/branch/main/docs/Daisy/Store.md)
* [`Store`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Store.md#Daisy.Store.Store)
+* [`Store.epehemeral`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Store.md#Daisy.Store.Store.epehemeral)
+* [`Store.createEmpty()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Store.md#Daisy.Store.Store.createEmpty)
* [`Store.getRecord()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Store.md#Daisy.Store.Store.getRecord)
* [`Store.update()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Store.md#Daisy.Store.Store.update)
-* [WhaleSong: Diffie hellman ephemeral Fernet based encryption](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md)
+* [WhaleSong](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md)
-* [`DHEFern`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern)
+* [`Transport`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport)
-* [`DHEFern.cLog`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.cLog)
+* [`Transport.loadedKeys`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.loadedKeys)
-* [`DHEFern.loadedParams`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.loadedParams)
+* [`Transport.nodeNickname`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.nodeNickname)
-* [`DHEFern.loadedKeys`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.loadedKeys)
+* [`Transport.cache`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.cache)
-* [`DHEFern.nodeNickname`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.nodeNickname)
+* [`Transport.publicKey`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.publicKey)
-* [`DHEFern.cache`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.cache)
+* [`Transport.privateKey`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.privateKey)
-* [`DHEFern.publicKey`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.publicKey)
+* [`Transport.daisyCryptography`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.daisyCryptography)
-* [`DHEFern.privateKey`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.privateKey)
+* [`Transport.addPeerEphemeralKey()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.addPeerEphemeralKey)
-* [`DHEFern.checkInMem()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.checkInMem)
+* [`Transport.addPublickey()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.addPublickey)
-* [`DHEFern.decrypt()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.decrypt)
+* [`Transport.decrypt()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.decrypt)
-* [`DHEFern.encrypt()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.encrypt)
+* [`Transport.encrypt()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.encrypt)
-* [`DHEFern.genKeyPair()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.genKeyPair)
+* [`Transport.genOurEphemeralKey()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.genOurEphemeralKey)
-* [`DHEFern.genParams()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.genParams)
+* [`Transport.genStaticKey()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.genStaticKey)
-* [`DHEFern.getParamsBytes()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.getParamsBytes)
+* [`Transport.generateSessionKey()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.generateSessionKey)
-* [`DHEFern.getRecord()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.getRecord)
+* [`Transport.getRecord()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.getRecord)
-* [`DHEFern.getSalt()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.getSalt)
+* [`Transport.initStore()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.initStore)
-* [`DHEFern.initStore()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.initStore)
+* [`Transport.kdf()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.kdf)
-* [`DHEFern.keyDerive()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.keyDerive)
+* [`Transport.sessionSetup()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.sessionSetup)
-* [`DHEFern.loadParamBytes()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.loadParamBytes)
-* [`DHEFern.loadRecordToMem()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.loadRecordToMem)
* [base: Primary filtering functionality](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md)
* [`Filter`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter)
* [`Filter.cache`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.cache)
+* [`Filter.onodeID`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.onodeID)
+* [`Filter.todo`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.todo)
+* [`Filter.cryptographyInfo`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.cryptographyInfo)
+* [`Filter.messages`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.messages)
+* [`Filter.submessages`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.submessages)
+* [`Filter.completed`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.completed)
* [`Filter.mCheck()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.mCheck)
* [`Filter.protoMap()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.protoMap)
-* [`Filter.protoRoute()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.protoRoute)
* [`Filter.selfCheck()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.selfCheck)
* [`Filter.sieve()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.sieve)
-* [Protocols](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#protocols)
+* [Sponge.Protocols.Yellow](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md)
-* [`bubble.filter()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.Protocols.bubble.filter)
+* [`YCTX`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.YCTX)
-* [`catch.filter()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.Protocols.catch.filter)
+* [`Yellow`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow)
-* [`cryptography.filter()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.Protocols.cryptography.filter)
+* [`Yellow.yctx`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.yctx)
-* [`map.filter()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.Protocols.map.filter)
+* [`Yellow.message`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.message)
|
||||||
|
* [`Yellow.submessages`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.submessages)
|
||||||
|
* [`Yellow.submessagesIDs`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.submessagesIDs)
|
||||||
|
* [`Yellow.finishedSubmessages`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.finishedSubmessages)
|
||||||
|
* [`Yellow.dataOrder`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.dataOrder)
|
||||||
|
* [`Yellow.data`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.data)
|
||||||
|
* [`Yellow.nonce`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.nonce)
|
||||||
|
* [`Yellow.tag`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.tag)
|
||||||
|
* [`Yellow.gotHead`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.gotHead)
|
||||||
|
* [`Yellow.todo`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.todo)
|
||||||
|
* [`Yellow.checkComplete()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.checkComplete)
|
||||||
|
* [`Yellow.doAct()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.doAct)
|
||||||
|
* [`Yellow.dump()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.dump)
|
||||||
|
* [`Yellow.id()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.id)
|
||||||
|
* [`Yellow.pActions`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.pActions)
|
||||||
|
* [`Yellow.processPacket()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.processPacket)
|
||||||
|
* [Sponge.Protocols.bubble](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/bubble.md)
|
||||||
|
* [`Bubble`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/bubble.md#Sponge.Protocols.bubble.Bubble)
|
||||||
|
* [`Bubble.pActions`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/bubble.md#Sponge.Protocols.bubble.Bubble.pActions)
|
||||||
|
* [Sponge.Protocols.catch](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/catch.md)
|
||||||
|
* [`Catch`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/catch.md#Sponge.Protocols.catch.Catch)
|
||||||
|
* [`Catch.pActions`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/catch.md#Sponge.Protocols.catch.Catch.pActions)
|
||||||
|
* [Sponge.Protocols.cryptography](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/cryptography.md)
|
||||||
|
* [`CryptographyFilter`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/cryptography.md#Sponge.Protocols.cryptography.CryptographyFilter)
|
||||||
|
* [`CryptographyFilter.pActions`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/cryptography.md#Sponge.Protocols.cryptography.CryptographyFilter.pActions)
|
||||||
|
* [Sponge.Protocols.hopper](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/hopper.md)
|
||||||
|
* [`Hopper`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/hopper.md#Sponge.Protocols.hopper.Hopper)
|
||||||
|
* [`Hopper.pActions`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/hopper.md#Sponge.Protocols.hopper.Hopper.pActions)
|
||||||
|
* [Sponge.Protocols.map](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/map.md)
|
||||||
|
* [`Map`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/map.md#Sponge.Protocols.map.Map)
|
||||||
|
* [`Map.pActions`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/map.md#Sponge.Protocols.map.Map.pActions)
|
||||||
|
* [`Map.process()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/map.md#Sponge.Protocols.map.Map.process)
|
||||||
* [Header packet: Metadata packet](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md)
|
* [Header packet: Metadata packet](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md)
|
||||||
* [`Header`](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md#Packets.HeaderPacket.Header)
|
* [`Header`](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md#Packets.HeaderPacket.Header)
|
||||||
* [`Header.sender`](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md#Packets.HeaderPacket.Header.sender)
|
* [`Header.sender`](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md#Packets.HeaderPacket.Header.sender)
|
||||||
|
@ -147,7 +194,23 @@ sphinx-quickstart on Fri Jul 26 23:30:55 2024. -->
|
||||||
* [SubMessage: Additional data for larger messages](/PierMesh/piermesh/src/branch/main/docs/Packets/SubMessage.md)
|
* [SubMessage: Additional data for larger messages](/PierMesh/piermesh/src/branch/main/docs/Packets/SubMessage.md)
|
||||||
* [`SubMessage`](/PierMesh/piermesh/src/branch/main/docs/Packets/SubMessage.md#Packets.SubMessage.SubMessage)
|
* [`SubMessage`](/PierMesh/piermesh/src/branch/main/docs/Packets/SubMessage.md#Packets.SubMessage.SubMessage)
|
||||||
* [SubPacket: Packets for submessages](/PierMesh/piermesh/src/branch/main/docs/Packets/SubPacket.md)
|
* [SubPacket: Packets for submessages](/PierMesh/piermesh/src/branch/main/docs/Packets/SubPacket.md)
|
||||||
* [`SubPacket`](/PierMesh/piermesh/src/branch/main/docs/Packets/SubPacket.md#Packets.SubPacket.SubPacket)
|
* [`SubMessage`](/PierMesh/piermesh/src/branch/main/docs/Packets/SubPacket.md#Packets.SubMessage.SubMessage)
|
||||||
|
* [bubble.Bubble](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/bubble/Bubble.md)
|
||||||
|
* [`Bubble`](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/bubble/Bubble.md#Packets.Messages.Protocols.bubble.Bubble.Bubble)
|
||||||
|
* [catch.IndexSync](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/catch/IndexSync.md)
|
||||||
|
* [`IndexSync`](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/catch/IndexSync.md#Packets.Messages.Protocols.catch.IndexSync.IndexSync)
|
||||||
|
* [catch.Request](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/catch/Request.md)
|
||||||
|
* [`CatchRequest`](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/catch/Request.md#Packets.Messages.Protocols.catch.Request.CatchRequest)
|
||||||
|
* [catch.Response](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/catch/Response.md)
|
||||||
|
* [`CatchResponse`](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/catch/Response.md#Packets.Messages.Protocols.catch.Response.CatchResponse)
|
||||||
|
* [cryptography.Handshake](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/cryptography/Handshake.md)
|
||||||
|
* [`Handshake`](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/cryptography/Handshake.md#Packets.Messages.Protocols.cryptography.Handshake.Handshake)
|
||||||
|
* [hopper.Request](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/hopper/Request.md)
|
||||||
|
* [`HopperRequest`](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/hopper/Request.md#Packets.Messages.Protocols.hopper.Request.HopperRequest)
|
||||||
|
* [hopper.Response](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/hopper/Response.md)
|
||||||
|
* [`HopperResponse`](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/hopper/Response.md#Packets.Messages.Protocols.hopper.Response.HopperResponse)
|
||||||
|
* [map.Announce](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/map/Announce.md)
|
||||||
|
* [`AnnounceMessage`](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/map/Announce.md#Packets.Messages.Protocols.map.Announce.AnnounceMessage)
|
||||||
* [Transceiver: Layer 0 data transceiving](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md)
|
* [Transceiver: Layer 0 data transceiving](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md)
|
||||||
* [`Transceiver`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver)
|
* [`Transceiver`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver)
|
||||||
* [`Transceiver.cLog`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.cLog)
|
* [`Transceiver.cLog`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.cLog)
|
||||||
|
@ -170,17 +233,32 @@ sphinx-quickstart on Fri Jul 26 23:30:55 2024. -->
|
||||||
* [`Transceiver.responseCheck()`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.responseCheck)
|
* [`Transceiver.responseCheck()`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.responseCheck)
|
||||||
* [`Transceiver.send()`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.send)
|
* [`Transceiver.send()`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.send)
|
||||||
* [`Transceiver.sendAnnounce()`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.sendAnnounce)
|
* [`Transceiver.sendAnnounce()`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.sendAnnounce)
|
||||||
|
* [`Transceiver.sendMessage()`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.sendMessage)
|
||||||
* [serve: Web UI server](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md)
|
* [serve: Web UI server](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md)
|
||||||
* [`Server`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server)
|
* [`Server`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server)
|
||||||
* [`Server.cLog`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.cLog)
|
* [`Server.transceiver`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.transceiver)
|
||||||
* [`Server.transmitter`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.transmitter)
|
|
||||||
* [`Server.network`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.network)
|
* [`Server.network`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.network)
|
||||||
* [`Server.nodeID`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.nodeID)
|
* [`Server.nodeID`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.nodeID)
|
||||||
* [`Server.peerIDs`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.peerIDs)
|
* [`Server.peerIDs`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.peerIDs)
|
||||||
* [`Server.app`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.app)
|
* [`Server.app`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.app)
|
||||||
* [`Server.catch`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.catch)
|
* [`Server.catch`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.catch)
|
||||||
|
* [`Server.getPSKs()`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.getPSKs)
|
||||||
* [`Server.sendToPeer()`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.sendToPeer)
|
* [`Server.sendToPeer()`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.sendToPeer)
|
||||||
|
* [Context](/PierMesh/piermesh/src/branch/main/docs/Config/Context.md)
|
||||||
|
* [`Context`](/PierMesh/piermesh/src/branch/main/docs/Config/Context.md#Config.Context.Context)
|
||||||
|
* [`Context.ctx`](/PierMesh/piermesh/src/branch/main/docs/Config/Context.md#Config.Context.Context.ctx)
|
||||||
|
* [Services.Action](/PierMesh/piermesh/src/branch/main/docs/Services/Action.md)
|
||||||
|
* [`Action`](/PierMesh/piermesh/src/branch/main/docs/Services/Action.md#Services.Action.Action)
|
||||||
|
* [`Action.action`](/PierMesh/piermesh/src/branch/main/docs/Services/Action.md#Services.Action.Action.action)
|
||||||
|
* [`Action.data`](/PierMesh/piermesh/src/branch/main/docs/Services/Action.md#Services.Action.Action.data)
|
||||||
|
* [`Action.sender`](/PierMesh/piermesh/src/branch/main/docs/Services/Action.md#Services.Action.Action.sender)
|
||||||
|
* [`Action.senderID`](/PierMesh/piermesh/src/branch/main/docs/Services/Action.md#Services.Action.Action.senderID)
|
||||||
|
* [`Action.sourceNode`](/PierMesh/piermesh/src/branch/main/docs/Services/Action.md#Services.Action.Action.sourceNode)
|
||||||
|
* [`Action.recipient`](/PierMesh/piermesh/src/branch/main/docs/Services/Action.md#Services.Action.Action.recipient)
|
||||||
|
* [`Action.recipientNode`](/PierMesh/piermesh/src/branch/main/docs/Services/Action.md#Services.Action.Action.recipientNode)
|
||||||
|
* [`Action.getAction()`](/PierMesh/piermesh/src/branch/main/docs/Services/Action.md#Services.Action.Action.getAction)
|
||||||
|
* [`Action.getData()`](/PierMesh/piermesh/src/branch/main/docs/Services/Action.md#Services.Action.Action.getData)
|
||||||
|
|
||||||
|
|
||||||
# System Overview
|
# System Overview
|
||||||
|
|
||||||
|
|
68
docs/run.md
68
docs/run.md
|
@ -8,13 +8,6 @@ Class that handles most of the PierMesh data
|
||||||
|
|
||||||
[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/src/run.py)
|
[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/src/run.py)
|
||||||
|
|
||||||
#### toLog
|
|
||||||
|
|
||||||
We store logs to be processed here
|
|
||||||
|
|
||||||
* **Type:**
|
|
||||||
list
|
|
||||||
|
|
||||||
#### actions
|
#### actions
|
||||||
|
|
||||||
Dictionary mapping methods with the action prefix to the method name after action dynamically to be called through Sponge (Sponge.base) filtering
|
Dictionary mapping methods with the action prefix to the method name after action dynamically to be called through Sponge (Sponge.base) filtering
|
||||||
|
@ -71,13 +64,6 @@ LoRa transceiver Transceiver
|
||||||
* **Type:**
|
* **Type:**
|
||||||
[Transceiver](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver)
|
[Transceiver](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver)
|
||||||
|
|
||||||
#### processed
|
|
||||||
|
|
||||||
List of IDs of already completed messages so that we don’t reprocess messages
|
|
||||||
|
|
||||||
* **Type:**
|
|
||||||
list
|
|
||||||
|
|
||||||
#### proc
|
#### proc
|
||||||
|
|
||||||
This process (psutil.Process), used for managing and monitoring PierMesh
|
This process (psutil.Process), used for managing and monitoring PierMesh
|
||||||
|
@ -92,29 +78,37 @@ Dictionary of PierMesh service tasks
|
||||||
* **Type:**
|
* **Type:**
|
||||||
dict
|
dict
|
||||||
|
|
||||||
#### SEE ALSO
|
#### *async* action_addPSK(data)
|
||||||
`logPassLoop`
|
|
||||||
: Loop to handle logging to file and TUI
|
|
||||||
|
|
||||||
#### *async* action_initNodeDH(data: dict)
|
Action to add a PSK for a specific node; currently unused
|
||||||
|
|
||||||
Initialize diffie hellman key exchange
|
#### *async* action_hop(data)
|
||||||
|
|
||||||
|
Proxy a request to the main internet (in the future cross protocol/link)
|
||||||
|
|
||||||
|
#### *async* action_initCryptography(data: dict)
|
||||||
|
|
||||||
|
Initialize AES-GCM encrypted transport session
|
||||||
|
|
||||||
#### SEE ALSO
|
#### SEE ALSO
|
||||||
`Cryptography.DHEFern.DHEFern`
|
[`Cryptography.WhaleSong.Transport`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport)
|
||||||
: End to end encryption functionality
|
: End to end encryption functionality
|
||||||
|
|
||||||
#### *async* action_keyDeriveDH(data: dict)
|
|
||||||
|
|
||||||
Derive key via diffie hellman key exchange
|
|
||||||
|
|
||||||
#### *async* action_map(data: dict)
|
#### *async* action_map(data: dict)
|
||||||
|
|
||||||
Map new network data to internal network map
|
Map new network data to internal network map
|
||||||
|
|
||||||
#### SEE ALSO
|
#### SEE ALSO
|
||||||
`Siph.network.Network`
|
`Siph.network.Network`
|
||||||
: Layered graph etwork representation
|
: Layered graph network representation
|
||||||
|
|
||||||
|
#### *async* action_routeCatch(data: dict)
|
||||||
|
|
||||||
|
Route received catch to peer who requested it
|
||||||
|
|
||||||
|
#### *async* action_routeHop(data: dict)
|
||||||
|
|
||||||
|
Return proxy request results to requester
|
||||||
|
|
||||||
#### *async* action_sendCatch(data: dict)
|
#### *async* action_sendCatch(data: dict)
|
||||||
|
|
||||||
|
@ -131,21 +125,27 @@ Send data to a peer connected to the server
|
||||||
`Sponge.Protocols`
|
`Sponge.Protocols`
|
||||||
: Protocol based packet filtering
|
: Protocol based packet filtering
|
||||||
|
|
||||||
`webui.serve.Server`
|
[`Splash.serve.Server`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server)
|
||||||
: Runs a light Microdot web server with http/s and websocket functionality
|
: Runs a light Microdot web server with http/s and websocket functionality
|
||||||
|
|
||||||
`webui.serve.Server.sendToPeer`
|
[`Splash.serve.Server.sendToPeer`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.sendToPeer)
|
||||||
: Function to actually execute the action
|
: Function to actually execute the action
|
||||||
|
|
||||||
#### cLog(priority: int, message: str)
|
#### *async* action_syncIndex(data: dict)
|
||||||
|
|
||||||
Convenience function that logs to the ui and log files
|
Add received index entries to the local Catch from a remote Catch index
|
||||||
|
|
||||||
* **Parameters:**
|
#### *async* fsInit()
|
||||||
* **priority** (*int*) – Priority of message to be passed to logging
|
|
||||||
* **message** (*str*) – Message to log
|
Initialize the file system for use
|
||||||
* **Return type:**
|
|
||||||
None
|
#### *async* main()
|
||||||
|
|
||||||
|
Main loop, sets up the message listening, system monitoring and server running loops
|
||||||
|
|
||||||
|
#### *async* monitor()
|
||||||
|
|
||||||
|
Monitor and log ram and cpu usage
|
||||||
|
|
||||||
#### *async* spongeListen()
|
#### *async* spongeListen()
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,3 @@
|
||||||
|
<a id="soil-daisy-signal-management"></a>
|
||||||
|
|
||||||
|
# Soil: Daisy signal management
|
|
@ -0,0 +1,3 @@
|
||||||
|
<a id="ui-tui-application"></a>
|
||||||
|
|
||||||
|
# ui: TUI application
|
|
@ -0,0 +1,35 @@
|
||||||
|
<a id="module-tlog"></a>
|
||||||
|
|
||||||
|
<a id="tlog"></a>
|
||||||
|
|
||||||
|
# tlog
|
||||||
|
|
||||||
|
### *class* tlog.VHandler(level, tolog)
|
||||||
|
|
||||||
|
Custom log handler to push logs into a thread-safe queue so the TUI can read them
|
||||||
|
|
||||||
|
[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/src/tlog.py)
|
||||||
|
|
||||||
|
#### tolog
|
||||||
|
|
||||||
|
Thread-safe log queue
|
||||||
|
|
||||||
|
* **Type:**
|
||||||
|
Queue.queue
|
||||||
|
|
||||||
|
#### emit(record)
|
||||||
|
|
||||||
|
Do whatever it takes to actually log the specified logging record.
|
||||||
|
|
||||||
|
This version is intended to be implemented by subclasses and so
|
||||||
|
raises a NotImplementedError; `VHandler` overrides it to push the formatted record onto the thread-safe `tolog` queue.
|
||||||
|
|
||||||
|
#### tolog *= <queue.Queue object>*
|
||||||
|
|
||||||
|
### tlog.logUI(stdscr, tolog, nodeNickname)
|
||||||
|
|
||||||
|
TUI loop
|
||||||
|
|
||||||
|
### tlog.runLogUI(tolog, nodeNickname)
|
||||||
|
|
||||||
|
Some required kludge
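The pattern here (a `logging.Handler` subclass whose `emit` drops formatted records onto a thread-safe queue that the TUI loop drains) can be sketched roughly as follows; class and variable names are illustrative rather than the exact `tlog.VHandler` internals:

```python
# Rough sketch of a queue-backed log handler in the style of tlog.VHandler
# (names and setup are illustrative, not the actual PierMesh wiring)
import logging
import queue

tolog = queue.Queue()  # thread-safe queue the TUI thread reads from

class QueueHandler(logging.Handler):
    def __init__(self, level, tolog):
        super().__init__(level)
        self.tolog = tolog

    def emit(self, record):
        # Format the record and hand it off to the TUI via the queue
        self.tolog.put(self.format(record))

logger = logging.getLogger("demo")
logger.addHandler(QueueHandler(logging.INFO, tolog))
logger.setLevel(logging.INFO)
logger.info("hello TUI")
print(tolog.get_nowait())  # the TUI loop would poll/drain this instead
```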
|
74
docs/ui.md
74
docs/ui.md
|
@ -1,74 +0,0 @@
|
||||||
<a id="ui-tui-application"></a>
|
|
||||||
|
|
||||||
# ui: TUI application
|
|
||||||
|
|
||||||
### *class* ui.TUI(driver_class: Type[Driver] | None = None, css_path: str | PurePath | List[str | PurePath] | None = None, watch_css: bool = False)
|
|
||||||
|
|
||||||
TUI for PierMesh
|
|
||||||
|
|
||||||
[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/src/ui.py)
|
|
||||||
|
|
||||||
#### visibleLogo
|
|
||||||
|
|
||||||
Whether the logo is visible or not, used in toggling visibility
|
|
||||||
|
|
||||||
* **Type:**
|
|
||||||
bool
|
|
||||||
|
|
||||||
#### nodeOb
|
|
||||||
|
|
||||||
Reference to the Node running the PierMesh service
|
|
||||||
|
|
||||||
* **Type:**
|
|
||||||
[Node](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node)
|
|
||||||
|
|
||||||
#### done
|
|
||||||
|
|
||||||
Whether the TUI has been killed
|
|
||||||
|
|
||||||
* **Type:**
|
|
||||||
bool
|
|
||||||
|
|
||||||
#### CSS_PATH *: ClassVar[CSSPathType | None]* *= 'ui.tcss'*
|
|
||||||
|
|
||||||
File paths to load CSS from.
|
|
||||||
|
|
||||||
#### action_quitFull()
|
|
||||||
|
|
||||||
Kill the whole stack by setting self to done and terminating the thread. We check in run.monitor later and kill the rest of the stack then with psutil
|
|
||||||
|
|
||||||
#### SEE ALSO
|
|
||||||
`run.monitor`
|
|
||||||
|
|
||||||
#### action_toggleFullscreen()
|
|
||||||
|
|
||||||
Toggle fullscreen logs by either collapsing width or setting it to it’s original size
|
|
||||||
|
|
||||||
#### compose()
|
|
||||||
|
|
||||||
Build the TUI
|
|
||||||
|
|
||||||
#### do_set_cpu_percent(percent: float)
|
|
||||||
|
|
||||||
Set CPU percent in the label and progress bar
|
|
||||||
|
|
||||||
* **Parameters:**
|
|
||||||
**percent** (*float*) – Percent of the cpu PierMesh is using
|
|
||||||
|
|
||||||
#### do_set_mem(memmb: float)
|
|
||||||
|
|
||||||
Set memory usage label in the ui
|
|
||||||
|
|
||||||
* **Parameters:**
|
|
||||||
**memmb** (*float*) – Memory usage of PierMesh in megabytes
|
|
||||||
|
|
||||||
#### do_write_line(logLine: str)
|
|
||||||
|
|
||||||
Write line to the logs panel
|
|
||||||
|
|
||||||
* **Parameters:**
|
|
||||||
**logLine** (*str*) – Line to log
|
|
||||||
|
|
||||||
#### on_mount()
|
|
||||||
|
|
||||||
Called at set up, configures the title and the progess bar
|
|
|
@ -1,9 +1,9 @@
|
||||||
# DONT TOUCH THIS SECTION UNLESS YOU KNOW WHAT YOURE DOING
|
# DONT TOUCH THIS SECTION UNLESS YOU KNOW WHAT YOURE DOING
|
||||||
[DEFAULT]
|
[DEFAULT]
|
||||||
Nickname = node00
|
Nickname = node1
|
||||||
StartupDelay = 0
|
StartupDelay = 0
|
||||||
WebUIPort = 5000
|
WebUIPort = 5000
|
||||||
ShowTUI = True
|
ShowTUI = False
|
||||||
|
|
||||||
[OPERATOR_REQUIRED]
|
[OPERATOR_REQUIRED]
|
||||||
TransceiverPort = /dev/ttyACM0
|
TransceiverPort = /dev/ttyACM0
|
||||||
|
@ -12,3 +12,4 @@ PSK = jgf765!FS0+6
|
||||||
# DO YOUR NON REQUIRED SETTINGS HERE
|
# DO YOUR NON REQUIRED SETTINGS HERE
|
||||||
[OPERATOR_OVERRIDES]
|
[OPERATOR_OVERRIDES]
|
||||||
|
|
||||||
|
ShowTUI = False
|
||||||
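For reference, `[DEFAULT]` keys in an ini file like this act as fallbacks for every other section when read with Python's standard `configparser`, so a key repeated under `[OPERATOR_OVERRIDES]` wins for that section. A minimal sketch; the sample is inlined with a placeholder PSK so it runs standalone:

```python
# Minimal sketch of reading a layered node config with configparser
import configparser

SAMPLE = """
[DEFAULT]
Nickname = node1
StartupDelay = 0
WebUIPort = 5000
ShowTUI = False

[OPERATOR_REQUIRED]
TransceiverPort = /dev/ttyACM0
PSK = examplepsk

[OPERATOR_OVERRIDES]
ShowTUI = False
"""

config = configparser.ConfigParser()
config.read_string(SAMPLE)

# [DEFAULT] values are visible from every section; section-local keys override them
print(config["OPERATOR_REQUIRED"]["Nickname"])             # node1 (inherited from DEFAULT)
print(config["DEFAULT"].getint("WebUIPort"))               # 5000
print(config["OPERATOR_OVERRIDES"].getboolean("ShowTUI"))  # False
```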
|
|
|
@ -1,19 +1,21 @@
|
||||||
from bs4 import BeautifulSoup
|
# NOTE: Used for requesting web pages
|
||||||
|
|
||||||
import requests
|
import requests
|
||||||
|
|
||||||
import msgpack
|
# NOTE: Used for parsing web pages
|
||||||
|
from bs4 import BeautifulSoup
|
||||||
|
|
||||||
import lzma
|
# NOTE: Generic imports
|
||||||
import base64
|
import base64
|
||||||
import mimetypes
|
import mimetypes
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
from Packets.Messages.Protocols.hopper.Response import HopperResponse
|
|
||||||
|
|
||||||
logger = logging.getLogger("__main__." + __name__)
|
logger = logging.getLogger("__main__." + __name__)
|
||||||
|
|
||||||
|
|
||||||
def downloadFile(url, text=True, mimeType=None):
|
def downloadFile(url, text=True, mimeType=None):
|
||||||
|
"""
|
||||||
|
Download resource from url and convert it to text or a data url
|
||||||
|
"""
|
||||||
fbytes = b""
|
fbytes = b""
|
||||||
with requests.get(url, stream=True) as r:
|
with requests.get(url, stream=True) as r:
|
||||||
r.raise_for_status()
|
r.raise_for_status()
|
||||||
|
@ -25,7 +27,7 @@ def downloadFile(url, text=True, mimeType=None):
|
||||||
if mimeType == None:
|
if mimeType == None:
|
||||||
mimeType, encoding = mimetypes.guess_type(url)
|
mimeType, encoding = mimetypes.guess_type(url)
|
||||||
if mimeType == None:
|
if mimeType == None:
|
||||||
raise Error(
|
raise ValueError(
|
||||||
"Couldnt guess mime type and none was supplied, cant encode to data url"
|
"Couldnt guess mime type and none was supplied, cant encode to data url"
|
||||||
)
|
)
|
||||||
b64str = base64.b64encode(fbytes).decode("utf-8")
|
b64str = base64.b64encode(fbytes).decode("utf-8")
|
||||||
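The data-url branch above amounts to guessing a MIME type and base64-encoding the fetched bytes. Stripped of the surrounding Hopper plumbing, that conversion looks roughly like this; the `data:` prefix format is the standard one, and the exact return value of `downloadFile` is not shown in this hunk:

```python
# Standalone sketch of the bytes-to-data-url conversion used in downloadFile
# (simplified; the real function also handles the text=True path)
import base64
import mimetypes

def to_data_url(url, fbytes, mimeType=None):
    if mimeType is None:
        mimeType, _encoding = mimetypes.guess_type(url)
    if mimeType is None:
        raise ValueError(
            "Couldn't guess mime type and none was supplied, can't encode to data url"
        )
    b64str = base64.b64encode(fbytes).decode("utf-8")
    return "data:{0};base64,{1}".format(mimeType, b64str)

print(to_data_url("https://example.com/logo.png", b"\x89PNG\r\n"))
```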
|
|
|
@ -1,8 +1,25 @@
|
||||||
|
|
||||||
|
|
||||||
class Context:
|
class Context:
|
||||||
|
"""
|
||||||
|
Generic context data structure, currently subclassed for use in filters; see Sponge/Protocols/Yellow.py
|
||||||
|
|
||||||
|
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Config/Context.py>`__
|
||||||
|
|
||||||
|
Attributes
|
||||||
|
----------
|
||||||
|
ctx: dict
|
||||||
|
Dictionary of context values
|
||||||
|
|
||||||
|
"""
|
||||||
def __init__(self, subsets: dict={}, **kwargs):
|
def __init__(self, subsets: dict={}, **kwargs):
|
||||||
# Subsets should be a dict of list of value keys
|
"""
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
subsets: dict
|
||||||
|
Keys mapped to lists of keys for grouping values (unused currently)
|
||||||
|
kwargs: kwargs
|
||||||
|
Keyword arguments mapped into the context as key/value pairs
|
||||||
|
"""
|
||||||
self.ctx = {}
|
self.ctx = {}
|
||||||
self.subsets = subsets
|
self.subsets = subsets
|
||||||
for key, val in kwargs.items():
|
for key, val in kwargs.items():
|
||||||
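Assuming the loop that this hunk cuts off simply copies each keyword argument into `self.ctx`, usage of `Context` looks roughly like this (field names are illustrative):

```python
# Hypothetical usage sketch of Config.Context.Context; assumes the truncated
# loop above does self.ctx[key] = val for each keyword argument.
from Config.Context import Context

ctx = Context(subsets={}, sender=12345, protocol="yellow")
print(ctx.ctx["sender"])    # 12345
print(ctx.ctx["protocol"])  # yellow
```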
|
|
|
@ -0,0 +1,5 @@
|
||||||
|
Context
|
||||||
|
=======
|
||||||
|
|
||||||
|
.. autoclass:: Config.Context.Context
|
||||||
|
:members:
|
|
@ -1,24 +1,25 @@
|
||||||
import base64
|
# NOTE: Generic imports
|
||||||
import os
|
import os
|
||||||
import lzma
|
import lzma
|
||||||
import logging
|
import logging
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
# NOTE: Import for handling message data
|
||||||
import msgpack
|
import msgpack
|
||||||
|
|
||||||
|
# NOTE: Cryptography imports
|
||||||
from Crypto.PublicKey import ECC
|
from Crypto.PublicKey import ECC
|
||||||
from Crypto.Hash import SHAKE128
|
from Crypto.Hash import SHAKE128
|
||||||
from Crypto.Protocol.DH import key_agreement
|
from Crypto.Protocol.DH import key_agreement
|
||||||
from Crypto.Cipher import AES
|
from Crypto.Cipher import AES
|
||||||
|
|
||||||
|
# NOTE: Daisy database import
|
||||||
from Daisy.Store import Store
|
from Daisy.Store import Store
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger("__main__." + __name__)
|
logger = logging.getLogger("__main__." + __name__)
|
||||||
|
|
||||||
# TODO: Different store directories per node
|
# TODO: Different store directories per node
|
||||||
# TODO: First time psk transport initiation
|
|
||||||
# Add this credential manually, its picked up and used when the two nodes try to communicate before the session is encrypted
|
|
||||||
|
|
||||||
|
|
||||||
class Transport:
|
class Transport:
|
||||||
"""
|
"""
|
||||||
|
@ -27,11 +28,6 @@ class Transport:
|
||||||
|
|
||||||
Attributes
|
Attributes
|
||||||
----------
|
----------
|
||||||
cLog
|
|
||||||
Method reference to `run.Node.cLog` so we can log to the ui from here
|
|
||||||
|
|
||||||
loadedParams: dict
|
|
||||||
In memory representations of cryptography parameters
|
|
||||||
|
|
||||||
loadedKeys: dict
|
loadedKeys: dict
|
||||||
In memory representations of cryptography keys
|
In memory representations of cryptography keys
|
||||||
|
@ -39,7 +35,7 @@ class Transport:
|
||||||
nodeNickname: str
|
nodeNickname: str
|
||||||
Name of node for isolating configs when running multiple nodes
|
Name of node for isolating configs when running multiple nodes
|
||||||
|
|
||||||
cache: Components.daisy.Cache
|
cache: Daisy.Cache.Cache
|
||||||
Daisy cache for use in storing cryptography information
|
Daisy cache for use in storing cryptography information
|
||||||
|
|
||||||
publicKey
|
publicKey
|
||||||
|
@ -47,39 +43,43 @@ class Transport:
|
||||||
|
|
||||||
privateKey
|
privateKey
|
||||||
Private key for node
|
Private key for node
|
||||||
"""
|
|
||||||
|
|
||||||
|
daisyCryptography: Daisy.CryptographyUtil.SteelPetal
|
||||||
|
Record cryptography reference
|
||||||
|
"""
|
||||||
def __init__(self, cache, nodeNickname, daisyCryptography, psk):
|
def __init__(self, cache, nodeNickname, daisyCryptography, psk):
|
||||||
"""
|
"""
|
||||||
Parameters
|
Parameters
|
||||||
----------
|
----------
|
||||||
cache: Components.daisy.Cache
|
cache: Daisy.Cache.Cache
|
||||||
Reference to the node instances Daisy cache
|
Reference to the node instances Daisy cache
|
||||||
|
|
||||||
nodeNickname: str
|
nodeNickname: str
|
||||||
Node nickname for record storage
|
Node nickname for record storage
|
||||||
|
|
||||||
cLog
|
daisyCryptography: Daisy.CryptographyUtil.SteelPetal
|
||||||
Reference to `run.Node.cLog`
|
Record cryptography reference
|
||||||
|
|
||||||
|
psk: str
|
||||||
|
Plaintext pre shared key
|
||||||
"""
|
"""
|
||||||
self.stores = {}
|
self.stores = {}
|
||||||
self.nodeNickname = nodeNickname
|
self.nodeNickname = nodeNickname
|
||||||
self.cache = cache
|
self.cache = cache
|
||||||
self.daisyCryptography = daisyCryptography
|
self.daisyCryptography = daisyCryptography
|
||||||
if os.path.exists("daisy/cryptography/{0}/key".format(nodeNickname)) == False:
|
if not os.path.exists("{0}/daisy/cryptography/key".format(nodeNickname)):
|
||||||
logger.log(20, "Key store DNE, initializing")
|
logger.info("Key store DNE, initializing")
|
||||||
self.initStore("key")
|
self.initStore("key")
|
||||||
else:
|
else:
|
||||||
logger.log(20, "Key store exists, loading")
|
logger.info("Key store exists, loading")
|
||||||
self.stores["key"] = Store(
|
self.stores["key"] = Store(
|
||||||
"key", "cryptography", nodeNickname, daisyCryptography
|
"key", "cryptography", nodeNickname, daisyCryptography
|
||||||
)
|
)
|
||||||
logger.log(20, "Store loaded")
|
logger.info("Key store initialized")
|
||||||
logger.log(20, "Key store initialized")
|
|
||||||
srecord = self.getRecord("key", "self")
|
srecord = self.getRecord("key", "self")
|
||||||
if srecord == False:
|
if srecord == False:
|
||||||
self.stores["key"].createEmpty("self")
|
self.stores["key"].createEmpty("self")
|
||||||
|
# TODO: Note that this happens in the docs
|
||||||
self.stores["key"].update(
|
self.stores["key"].update(
|
||||||
"self",
|
"self",
|
||||||
{"PSK": self.daisyCryptography.pad(psk).encode("utf-8")},
|
{"PSK": self.daisyCryptography.pad(psk).encode("utf-8")},
|
||||||
|
@ -95,56 +95,12 @@ class Transport:
|
||||||
}, write=False)
|
}, write=False)
|
||||||
|
|
||||||
def kdf(self, bytesX):
|
def kdf(self, bytesX):
|
||||||
|
"""
|
||||||
|
Key derivation function
|
||||||
|
"""
|
||||||
|
# TODO: Better explanation
|
||||||
return SHAKE128.new(bytesX).read(32)
|
return SHAKE128.new(bytesX).read(32)
|
||||||
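`kdf` simply squeezes 32 bytes out of SHAKE128, which is the shape `key_agreement` expects for its `kdf` callable and which matches an AES-256 key. In isolation, a sketch using pycryptodome directly:

```python
# Sketch: the SHAKE128-based key derivation in isolation (pycryptodome)
from Crypto.Hash import SHAKE128

def kdf(bytesX: bytes) -> bytes:
    # Squeeze 32 bytes (256 bits) out of SHAKE128, e.g. for use as an AES-256 key
    return SHAKE128.new(bytesX).read(32)

key = kdf(b"shared secret bytes from the ECDH exchange")
print(len(key))  # 32
```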
|
|
||||||
def checkInMem(self, store: str, nodeID: str, checkFieldsExist=[]):
|
|
||||||
"""
|
|
||||||
Check if parameters or keys are loaded for node of nodeID
|
|
||||||
|
|
||||||
Parameters
|
|
||||||
----------
|
|
||||||
store: str
|
|
||||||
Whether to check loaded keys or parameters
|
|
||||||
|
|
||||||
"""
|
|
||||||
if store == "param":
|
|
||||||
return nodeID in self.loadedParams.keys()
|
|
||||||
elif store == "key":
|
|
||||||
record = self.getRecord("key", nodeID)
|
|
||||||
if record != False:
|
|
||||||
for field in checkFieldsExist:
|
|
||||||
if not (field in record.keys()):
|
|
||||||
if field == "staticKey":
|
|
||||||
self.genStaticKey(nodeID)
|
|
||||||
elif field == "ourEphemeralKey":
|
|
||||||
self.genOurEphemeralKey(nodeID)
|
|
||||||
|
|
||||||
def loadRecordToMem(self, store: str, nodeID: str):
|
|
||||||
"""
|
|
||||||
Load record of nodeID from store to either keys or pameters
|
|
||||||
"""
|
|
||||||
r = self.getRecord(store, nodeID)
|
|
||||||
if r == False:
|
|
||||||
logger.log(
|
|
||||||
30, "Tried to load nonexistent {0} for node {1}".format(store, nodeID)
|
|
||||||
)
|
|
||||||
return False
|
|
||||||
elif self.checkInMem(store, nodeID):
|
|
||||||
logger.log(10, "{0}s already deserialized, skipping".format(store))
|
|
||||||
else:
|
|
||||||
if store == "param":
|
|
||||||
self.loadedParams[nodeID] = self.loadParamBytes(r)
|
|
||||||
"""
|
|
||||||
elif store == "key":
|
|
||||||
self.loadedKeys[nodeID] = {
|
|
||||||
"publicKey": Serialization.load_pem_public_key(r["publicKey"]),
|
|
||||||
"privateKey": Serialization.load_pem_private_key(
|
|
||||||
r["privateKey"], None
|
|
||||||
),
|
|
||||||
}
|
|
||||||
"""
|
|
||||||
return True
|
|
||||||
|
|
||||||
def getRecord(self, store: str, key: str, ephemeral=False):
|
def getRecord(self, store: str, key: str, ephemeral=False):
|
||||||
"""
|
"""
|
||||||
Get record from store: store with key: key
|
Get record from store: store with key: key
|
||||||
|
@ -156,7 +112,6 @@ class Transport:
|
||||||
else:
|
else:
|
||||||
return r
|
return r
|
||||||
|
|
||||||
# TODO: Fix stores, URGENT
|
|
||||||
def initStore(self, store: str):
|
def initStore(self, store: str):
|
||||||
"""
|
"""
|
||||||
Initialize store: store
|
Initialize store: store
|
||||||
|
@ -164,23 +119,14 @@ class Transport:
|
||||||
self.stores[store] = Store(
|
self.stores[store] = Store(
|
||||||
store, "cryptography", self.nodeNickname, self.daisyCryptography
|
store, "cryptography", self.nodeNickname, self.daisyCryptography
|
||||||
)
|
)
|
||||||
if store == "param":
|
if store == "key":
|
||||||
self.genParams()
|
|
||||||
self.stores[store].update("self", self.getParamsBytes(), recur=False)
|
|
||||||
elif store == "key":
|
|
||||||
self.stores[store].update("self", {}, recur=False)
|
self.stores[store].update("self", {}, recur=False)
|
||||||
else:
|
else:
|
||||||
logger.log(30, "Store not defined")
|
logger.warning("Store not defined")
|
||||||
|
|
||||||
def genStaticKey(self, onodeID, paramsOverride=False):
|
def genStaticKey(self, onodeID):
|
||||||
"""
|
"""
|
||||||
Generate public and private keys from self.params (TODO: Gen from passed params)
|
Generate static key for session encryption with given node
|
||||||
|
|
||||||
paramsOverride
|
|
||||||
False or parameters to use (TODO)
|
|
||||||
|
|
||||||
setSelf: bool
|
|
||||||
Whether to set self.privateKey and self.publicKey
|
|
||||||
"""
|
"""
|
||||||
staticKey = ECC.generate(curve="p256")
|
staticKey = ECC.generate(curve="p256")
|
||||||
self.stores["key"].update(
|
self.stores["key"].update(
|
||||||
|
@ -194,10 +140,25 @@ class Transport:
|
||||||
self.stores["key"].update(onodeID, {"staticKey": staticKey}, write=False)
|
self.stores["key"].update(onodeID, {"staticKey": staticKey}, write=False)
|
||||||
|
|
||||||
def genOurEphemeralKey(self, onodeID):
|
def genOurEphemeralKey(self, onodeID):
|
||||||
|
"""
|
||||||
|
Generate an ephemeral key for session encryption with the given node
|
||||||
|
"""
|
||||||
ourEphemeralKey = ECC.generate(curve="p256")
|
ourEphemeralKey = ECC.generate(curve="p256")
|
||||||
self.stores["key"].update(onodeID, {"ourEphemeralKey": ourEphemeralKey}, write=False)
|
self.stores["key"].update(onodeID, {"ourEphemeralKey": ourEphemeralKey}, write=False)
|
||||||
|
|
||||||
def addPublickey(self, onodeID, publicKey, forSelf=False):
|
def addPublickey(self, onodeID, publicKey, forSelf: bool = False):
|
||||||
|
"""
|
||||||
|
Add a public key for a given node including this one
|
||||||
|
|
||||||
|
onodeID
|
||||||
|
Node identifier
|
||||||
|
|
||||||
|
publicKey
|
||||||
|
Public key to add
|
||||||
|
|
||||||
|
forSelf: bool
|
||||||
|
Whether to add key for this node
|
||||||
|
"""
|
||||||
if forSelf:
|
if forSelf:
|
||||||
publicKey = ECC.generate(curve="p256")
|
publicKey = ECC.generate(curve="p256")
|
||||||
self.stores["key"].update("self", {
|
self.stores["key"].update("self", {
|
||||||
|
@ -210,30 +171,48 @@ class Transport:
|
||||||
},
|
},
|
||||||
write=False)
|
write=False)
|
||||||
else:
|
else:
|
||||||
# TODO: Fix stores
|
logger.info("Importing keys")
|
||||||
# self.stores["key"].update(onodeID, {"publicKey": publicKey})
|
|
||||||
logger.log(20, "Importing keys")
|
|
||||||
record = self.getRecord("key", onodeID)
|
record = self.getRecord("key", onodeID)
|
||||||
if record == False:
|
if record == False:
|
||||||
self.stores["key"].createEmpty(onodeID)
|
self.stores["key"].createEmpty(onodeID)
|
||||||
self.stores["key"].update(onodeID, {"publicKey": publicKey})
|
self.stores["key"].update(onodeID, {"publicKey": publicKey})
|
||||||
self.stores["key"].update(onodeID, {"publicKey": ECC.import_key(publicKey)}, write=False)
|
self.stores["key"].update(onodeID, {"publicKey": ECC.import_key(publicKey)}, write=False)
|
||||||
|
|
||||||
def addPeerEphemeralKey(self, onodeID, peerEphemeralKey):
|
def addPeerEphemeralKey(self, onodeID, peerEphemeralKey: bytes):
|
||||||
|
"""
|
||||||
|
Add a peer node's ephemeral key for session encryption
|
||||||
|
|
||||||
|
onodeID
|
||||||
|
Node identifier
|
||||||
|
|
||||||
|
peerEphemeralKey: bytes
|
||||||
|
Serialized ephemeral key
|
||||||
|
"""
|
||||||
self.stores["key"].update(onodeID, {"peerEphemeralKey": ECC.import_key(peerEphemeralKey)}, write=False)
|
self.stores["key"].update(onodeID, {"peerEphemeralKey": ECC.import_key(peerEphemeralKey)}, write=False)
|
||||||
|
|
||||||
def sessionSetup(self, onodeID, peerEphemeralKey):
|
def sessionSetup(self, onodeID, peerEphemeralKey: bytes):
|
||||||
# TODO: Deeper checking before loading
|
"""
|
||||||
# TODO: Loading existing records
|
Set up transport encryption session
|
||||||
if self.getRecord("key", onodeID) == False:
|
|
||||||
logger.log(30, "No record, waiting for announce")
|
onodeID
|
||||||
|
Node identifier
|
||||||
|
|
||||||
|
peerEphemeralKey: bytes
|
||||||
|
Serialized ephemeral key
|
||||||
|
"""
|
||||||
|
if not self.getRecord("key", onodeID):
|
||||||
|
logger.warning("No record, waiting for announce")
|
||||||
else:
|
else:
|
||||||
self.addPeerEphemeralKey(onodeID, peerEphemeralKey)
|
self.addPeerEphemeralKey(onodeID, peerEphemeralKey)
|
||||||
self.generateSessionKey(onodeID)
|
self.generateSessionKey(onodeID)
|
||||||
|
|
||||||
def generateSessionKey(self, onodeID):
|
def generateSessionKey(self, onodeID):
|
||||||
# TODO: Gen static key if not exists
|
"""
|
||||||
# TODO: Gen our ephemeral key if not exists
|
Generate session key for transport encryption
|
||||||
|
|
||||||
|
onodeID
|
||||||
|
Node identifier
|
||||||
|
"""
|
||||||
keysOb = self.getRecord("key", onodeID, ephemeral=True)
|
keysOb = self.getRecord("key", onodeID, ephemeral=True)
|
||||||
if ("publicKey" not in keysOb) or ("staticKey" not in keysOb):
|
if ("publicKey" not in keysOb) or ("staticKey" not in keysOb):
|
||||||
dkeysOb = self.getRecord("key", onodeID)
|
dkeysOb = self.getRecord("key", onodeID)
|
||||||
|
@ -273,16 +252,18 @@ class Transport:
|
||||||
self.stores["key"].update(onodeID, {"sessionKey": sessionKey}, write=False)
|
self.stores["key"].update(onodeID, {"sessionKey": sessionKey}, write=False)
|
||||||
return sessionKey
|
return sessionKey
|
||||||
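The derivation elided between the two hunks above combines the stored static and ephemeral ECC keys with the peer's and runs pycryptodome's `key_agreement` through the SHAKE128 `kdf`. A self-contained sketch with both peers simulated locally; the variable names are illustrative, not the stored record fields:

```python
# Sketch: static + ephemeral ECDH key agreement with pycryptodome,
# both peers simulated locally so the shared secret can be compared.
from Crypto.PublicKey import ECC
from Crypto.Hash import SHAKE128
from Crypto.Protocol.DH import key_agreement

def kdf(x: bytes) -> bytes:
    return SHAKE128.new(x).read(32)

ourStaticKey, ourEphemeralKey = ECC.generate(curve="p256"), ECC.generate(curve="p256")
peerStaticKey, peerEphemeralKey = ECC.generate(curve="p256"), ECC.generate(curve="p256")

# Our side: our private keys paired with the peer's public keys
ourSessionKey = key_agreement(
    static_priv=ourStaticKey,
    static_pub=peerStaticKey.public_key(),
    eph_priv=ourEphemeralKey,
    eph_pub=peerEphemeralKey.public_key(),
    kdf=kdf,
)

# Peer side: the mirror image of the same exchange
peerSessionKey = key_agreement(
    static_priv=peerStaticKey,
    static_pub=ourStaticKey.public_key(),
    eph_priv=peerEphemeralKey,
    eph_pub=ourEphemeralKey.public_key(),
    kdf=kdf,
)

assert ourSessionKey == peerSessionKey  # both ends derive the same 32-byte session key
```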
|
|
||||||
# TODO: Build in transport security (node/node)
|
|
||||||
def encrypt(self, data, nodeID: str, isDict: bool = True, pskEncrypt=False):
|
def encrypt(self, data, nodeID: str, isDict: bool = True, pskEncrypt=False):
|
||||||
"""
|
"""
|
||||||
Do Fernet encryption
|
Encrypt the given data with AES-GCM
|
||||||
|
|
||||||
data
|
data
|
||||||
Either bytes or dict to encrypt
|
Either bytes or dict to encrypt
|
||||||
|
|
||||||
isDict: bool
|
isDict: bool
|
||||||
Whether data is a dictionary
|
Whether data is a dictionary
|
||||||
|
|
||||||
|
pskEncrypt: bool
|
||||||
|
Whether to encrypt with pre-shared key
|
||||||
"""
|
"""
|
||||||
if nodeID == "-00001" or pskEncrypt:
|
if nodeID == "-00001" or pskEncrypt:
|
||||||
cipher = AES.new(self.getRecord("key", "self", ephemeral=True)["PSK"], AES.MODE_GCM)
|
cipher = AES.new(self.getRecord("key", "self", ephemeral=True)["PSK"], AES.MODE_GCM)
|
||||||
|
@ -317,24 +298,33 @@ class Transport:
|
||||||
else:
|
else:
|
||||||
logger.log(20, "Node {0} does not have session key".format(nodeID))
|
logger.log(20, "Node {0} does not have session key".format(nodeID))
|
||||||
|
|
||||||
def decrypt(self, data, nodeID: str, nonce, tag):
|
def decrypt(self, data, onodeID: str, nonce, tag):
|
||||||
"""
|
"""
|
||||||
Decrypt bytes and return either str or dict (TODO: Check whether to msgpack load)
|
Decrypt bytes and return either str or dict depending on result
|
||||||
|
|
||||||
|
onodeID: str
|
||||||
|
Node identifier
|
||||||
|
|
||||||
|
nonce
|
||||||
|
Encryption nonce
|
||||||
|
|
||||||
|
tag
|
||||||
|
Encryption tag
|
||||||
"""
|
"""
|
||||||
# TODO: Handling existing record
|
record = self.getRecord("key", onodeID, ephemeral=True)
|
||||||
record = self.getRecord("key", nodeID, ephemeral=True)
|
|
||||||
if (record == False) or ("sessionKey" not in record.keys()):
|
if (record == False) or ("sessionKey" not in record.keys()):
|
||||||
cipher = AES.new(self.getRecord("key", "self", ephemeral=True)["PSK"], AES.MODE_GCM, nonce=nonce)
|
cipher = AES.new(self.getRecord("key", "self", ephemeral=True)["PSK"], AES.MODE_GCM, nonce=nonce)
|
||||||
|
|
||||||
data = cipher.decrypt(data)
|
data = cipher.decrypt(data)
|
||||||
logger.log(10, data)
|
# logger.debug(data)
|
||||||
#data = msgpack.loads(data)
|
try:
|
||||||
data = msgpack.loads(lzma.decompress(data))
|
data = msgpack.loads(lzma.decompress(data))
|
||||||
logger.log(10, "Decrypt/deserialize output")
|
except Exception:
|
||||||
logger.log(10, data)
|
logger.error(traceback.format_exc())
|
||||||
|
return False
|
||||||
|
# logger.debug("Decrypt/deserialize output")
|
||||||
|
# logger.debug(data)
|
||||||
return data
|
return data
|
||||||
# logger.log(20, "Node {0} not in keychain".format(nodeID))
|
|
||||||
# return False
|
|
||||||
else:
|
else:
|
||||||
if "sessionKey" in record.keys():
|
if "sessionKey" in record.keys():
|
||||||
sessionKey = record["sessionKey"]
|
sessionKey = record["sessionKey"]
|
||||||
|
@ -352,5 +342,5 @@ class Transport:
|
||||||
|
|
||||||
return data
|
return data
|
||||||
else:
|
else:
|
||||||
logger.log(20, "Node {0} does not have session key".format(nodeID))
|
logger.warning("Node {0} does not have session key".format(onodeID))
|
||||||
return False
|
return False
|
||||||
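Taken together, the payload path is msgpack + lzma on the plaintext side and AES-GCM (key, nonce, tag) on the cipher side. A self-contained round trip with a throwaway key, simplified relative to the session/PSK selection logic above:

```python
# Sketch: the msgpack + lzma + AES-GCM round trip used by Transport (simplified)
import lzma
import msgpack
from Crypto.Cipher import AES
from Crypto.Random import get_random_bytes

sessionKey = get_random_bytes(32)  # stands in for the derived session key or padded PSK

# Encrypt: serialize, compress, then seal with AES-GCM
message = {"sender": 12345, "data": "hello"}
cipher = AES.new(sessionKey, AES.MODE_GCM)
ciphertext, tag = cipher.encrypt_and_digest(lzma.compress(msgpack.dumps(message)))
nonce = cipher.nonce  # travels with the packet alongside the tag

# Decrypt: same key + nonce, verify the tag, then decompress and deserialize
cipher = AES.new(sessionKey, AES.MODE_GCM, nonce=nonce)
recovered = msgpack.loads(lzma.decompress(cipher.decrypt_and_verify(ciphertext, tag)))
assert recovered == message
```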
|
|
|
@ -1,7 +1,7 @@
|
||||||
|
|
||||||
WhaleSong: Diffie hellman ephemeral Fernet based encryption
|
WhaleSong
|
||||||
===========================================================
|
=========
|
||||||
|
|
||||||
.. autoclass:: Cryptography.WhaleSong.DHEFern
|
.. autoclass:: Cryptography.WhaleSong.Transport
|
||||||
:members:
|
:members:
|
||||||
:undoc-members:
|
:undoc-members:
|
||||||
|
|
|
@ -1,17 +1,18 @@
|
||||||
from Daisy.Daisy import Daisy
|
from Daisy.Daisy import Daisy
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
import logging
|
||||||
|
|
||||||
import msgpack
|
import msgpack
|
||||||
|
|
||||||
from watchdog.observers import Observer
|
|
||||||
|
|
||||||
# TODO: Dumping to cacheFile
|
# TODO: Dumping to cacheFile
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger("__main__." + __name__)
|
||||||
|
|
||||||
class Cache:
|
class Cache:
|
||||||
"""
|
"""
|
||||||
In memory collection of Daisy records
|
In-memory collection of Daisy records; provides the search functionality currently used by `Daisy.Catch.Catch`
|
||||||
|
|
||||||
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Daisy/Cache.py>`__
|
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Daisy/Cache.py>`__
|
||||||
"""
|
"""
|
||||||
|
@ -22,26 +23,25 @@ class Cache:
|
||||||
filepaths=None,
|
filepaths=None,
|
||||||
cacheFile=None,
|
cacheFile=None,
|
||||||
path: str = "daisy",
|
path: str = "daisy",
|
||||||
walk: bool = False,
|
walk: bool = False
|
||||||
isCatch: bool = False,
|
|
||||||
):
|
):
|
||||||
"""
|
"""
|
||||||
Parameters
|
Parameters
|
||||||
----------
|
----------
|
||||||
|
daisyCryptography: Daisy.CryptographyUtil.SteelPetal
|
||||||
|
Record cryptography reference
|
||||||
|
|
||||||
filepaths
|
filepaths
|
||||||
Either a list of filepaths to load or None
|
Either a list of filepaths to load or None
|
||||||
|
|
||||||
cacheFile
|
cacheFile
|
||||||
Path to a cache file which is a collection of paths to load
|
Path to a cache file which is a collection of paths to load or None
|
||||||
|
|
||||||
path: str
|
path: str
|
||||||
Path prefix to load records from
|
Path prefix to load records from
|
||||||
|
|
||||||
walk: bool
|
walk: bool
|
||||||
Whether to automatically walk the path and load records
|
Whether to automatically walk the path and load records
|
||||||
|
|
||||||
isCatch: bool
|
|
||||||
Whether this cache is for catchs
|
|
||||||
"""
|
"""
|
||||||
self.daisyCryptography = daisyCryptography
|
self.daisyCryptography = daisyCryptography
|
||||||
self.data = {}
|
self.data = {}
|
||||||
|
@ -50,12 +50,13 @@ class Cache:
|
||||||
if not os.path.exists(self.path):
|
if not os.path.exists(self.path):
|
||||||
os.makedirs(self.path)
|
os.makedirs(self.path)
|
||||||
|
|
||||||
if filepaths != None:
|
if filepaths is not None:
|
||||||
for fp in filepaths:
|
for fp in filepaths:
|
||||||
fp = path + "/" + fp
|
fp = path + "/" + fp
|
||||||
if os.path.isfile(fp):
|
if os.path.isfile(fp):
|
||||||
self.data[fp] = Daisy(fp, daisyCryptography)
|
self.data[fp] = Daisy(fp, daisyCryptography)
|
||||||
elif cacheFile != None:
|
elif cacheFile is not None:
|
||||||
|
self.cacheFile = cacheFile
|
||||||
with open(cacheFile, "r") as f:
|
with open(cacheFile, "r") as f:
|
||||||
for fp in f.read().split("\n"):
|
for fp in f.read().split("\n"):
|
||||||
self.data[fp] = Daisy(fp, daisyCryptography)
|
self.data[fp] = Daisy(fp, daisyCryptography)
|
||||||
|
@ -78,17 +79,23 @@ class Cache:
|
||||||
|
|
||||||
data: dict
|
data: dict
|
||||||
Data to populate record with
|
Data to populate record with
|
||||||
|
|
||||||
|
remote: bool
|
||||||
|
Whether this is a reference to a distributed file (not implemented yet)
|
||||||
"""
|
"""
|
||||||
if remote == False:
|
if not remote:
|
||||||
with open(self.path + "/" + path, "wb") as f:
|
with open(self.path + "/" + path, "wb") as f:
|
||||||
f.write(msgpack.dumps(data))
|
f.write(msgpack.dumps(data))
|
||||||
# logging.log(10, "Done creating record")
|
logger.debug("Done creating record")
|
||||||
self.data[path] = Daisy(self.path + "/" + path, self.daisyCryptography)
|
self.data[path] = Daisy(self.path + "/" + path, self.daisyCryptography)
|
||||||
# logging.log(10, "Done loading to Daisy")
|
logger.debug("Done loading to Daisy")
|
||||||
return self.data[path]
|
return self.data[path]
|
||||||
else:
|
else:
|
||||||
self.data[path] = Ref(path, remote)
|
logger.debug("Not that (you shouldn't be here yet, remote Daisy links aren't ready yet)")
|
||||||
return self.data[path]
|
# TODO: Full remote path functionality
|
||||||
|
pass
|
||||||
|
# self.data[path] = Ref(path, remote)
|
||||||
|
# return self.data[path]
|
||||||
|
|
||||||
def get(self, path: str):
|
def get(self, path: str):
|
||||||
"""
|
"""
|
||||||
|
@ -104,7 +111,8 @@ class Cache:
|
||||||
self.data[path] = Daisy(self.path + "/" + path, self.daisyCryptography)
|
self.data[path] = Daisy(self.path + "/" + path, self.daisyCryptography)
|
||||||
return self.data[path]
|
return self.data[path]
|
||||||
else:
|
else:
|
||||||
# logging.log(10, "File does not exist")
|
path = self.path + "/" + path
|
||||||
|
logger.debug(f"File {path} does not exist")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def refresh(self):
|
def refresh(self):
|
||||||
|
@ -116,18 +124,18 @@ class Cache:
|
||||||
|
|
||||||
def search(self, keydict: dict, strict: bool = True):
|
def search(self, keydict: dict, strict: bool = True):
|
||||||
"""
|
"""
|
||||||
Search cache for record for records with values
|
Search the cache for records whose keys and values match those
|
||||||
|
in the keydict
|
||||||
|
|
||||||
keydict: dict
|
keydict: dict
|
||||||
Values to search for
|
|
||||||
|
|
||||||
strict: bool
|
strict: bool
|
||||||
Whether to require values match
|
Whether to require all keys/values match
|
||||||
"""
|
"""
|
||||||
results = []
|
results = []
|
||||||
for key, val in self.data.items():
|
for key, val in self.data.items():
|
||||||
val = val.get()
|
val = val.get()
|
||||||
if strict and type(val) != str:
|
if strict and type(val) is not str:
|
||||||
addcheck = False
|
addcheck = False
|
||||||
for k, v in keydict.items():
|
for k, v in keydict.items():
|
||||||
if k in val.keys():
|
if k in val.keys():
|
||||||
|
@ -138,7 +146,7 @@ class Cache:
|
||||||
break
|
break
|
||||||
if addcheck:
|
if addcheck:
|
||||||
results.append([key, val])
|
results.append([key, val])
|
||||||
elif type(val) != str:
|
elif type(val) is not str:
|
||||||
for k, v in keydict.items():
|
for k, v in keydict.items():
|
||||||
if k in val.keys():
|
if k in val.keys():
|
||||||
if v in val[k]:
|
if v in val[k]:
|
||||||
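As a hypothetical usage sketch (record contents illustrative, and assuming a cache whose records hold dicts as created by `Cache.create`), `search` returns `[path, record]` pairs:

```python
# Hypothetical usage sketch of Daisy.Cache.Cache.search (contents illustrative)
from Daisy.Cache import Cache
from Daisy.CryptographyUtil import SteelPetal

cache = Cache(SteelPetal("example password"), path="daisy", walk=True)

# strict=True: a record only matches if every key in keydict is present and matches
exact = cache.search({"head": "bbs", "body": "piermesh"}, strict=True)

# strict=False: any single matching key/value (containment via `in`) is enough
loose = cache.search({"body": "pier"}, strict=False)

for path, record in exact:
    print(path, record)
```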
|
|
|
@ -1,9 +1,10 @@
|
||||||
|
# NOTE: Local imports
|
||||||
from Daisy.Cache import Cache
|
from Daisy.Cache import Cache
|
||||||
from Daisy.Ref import Ref
|
# from Daisy.Ref import Ref
|
||||||
|
|
||||||
|
# NOTE: Generic imports
|
||||||
import os
|
import os
|
||||||
import random
|
import random
|
||||||
import uuid
|
|
||||||
|
|
||||||
|
|
||||||
class Catch(Cache):
|
class Catch(Cache):
|
||||||
|
@ -26,7 +27,22 @@ class Catch(Cache):
|
||||||
walk: bool = False,
|
walk: bool = False,
|
||||||
):
|
):
|
||||||
"""
|
"""
|
||||||
Basically the same initialization parameters as Catch
|
Parameters
|
||||||
|
----------
|
||||||
|
path: str
|
||||||
|
Path prefix to load records from
|
||||||
|
|
||||||
|
daisyCryptography: Daisy.CryptographyUtil.SteelPetal
|
||||||
|
Record cryptography reference
|
||||||
|
|
||||||
|
filepaths
|
||||||
|
Either a list of filepaths to load or None
|
||||||
|
|
||||||
|
cacheFile
|
||||||
|
Path to a cache file which is a collection of paths to load or None
|
||||||
|
|
||||||
|
walk: bool
|
||||||
|
Whether to automatically walk the path and load records
|
||||||
"""
|
"""
|
||||||
super().__init__(
|
super().__init__(
|
||||||
daisyCryptography,
|
daisyCryptography,
|
||||||
|
@ -34,7 +50,6 @@ class Catch(Cache):
|
||||||
cacheFile=catchFile,
|
cacheFile=catchFile,
|
||||||
path=path,
|
path=path,
|
||||||
walk=walk,
|
walk=walk,
|
||||||
isCatch=True,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# TODO: Fins
|
# TODO: Fins
|
||||||
|
@ -62,7 +77,7 @@ class Catch(Cache):
|
||||||
List of (maximum 8 characters) strings at the end of the catch, or None if none
|
List of (maximum 8 characters) strings at the end of the catch, or None if none
|
||||||
"""
|
"""
|
||||||
r = ""
|
r = ""
|
||||||
if fins != None and fins != "":
|
if fins is not None and fins != "":
|
||||||
r = self.search({"head": head, "body": body, "fins": fins})
|
r = self.search({"head": head, "body": body, "fins": fins})
|
||||||
else:
|
else:
|
||||||
r = self.search({"head": head, "body": body})
|
r = self.search({"head": head, "body": body})
|
||||||
|
@ -73,16 +88,16 @@ class Catch(Cache):
|
||||||
|
|
||||||
def addc(self, peer, node, seperator, head, body, data, fins=None, remote=False):
|
def addc(self, peer, node, seperator, head, body, data, fins=None, remote=False):
|
||||||
tnpath = f"catch/{node}"
|
tnpath = f"catch/{node}"
|
||||||
if os.path.exists(self.path + "/" + tnpath) != True:
|
if not os.path.exists(self.path + "/" + tnpath):
|
||||||
os.makedirs(self.path + "/" + tnpath)
|
os.makedirs(self.path + "/" + tnpath)
|
||||||
tppath = tnpath + "/" + peer
|
tppath = tnpath + "/" + peer
|
||||||
if os.path.exists(self.path + "/" + tppath) != True:
|
if not os.path.exists(self.path + "/" + tppath):
|
||||||
os.makedirs(self.path + "/" + tppath)
|
os.makedirs(self.path + "/" + tppath)
|
||||||
sid = str(random.randrange(0, 999999)).zfill(6)
|
sid = str(random.randrange(0, 999999)).zfill(6)
|
||||||
data["seperator"] = seperator
|
data["seperator"] = seperator
|
||||||
data["head"] = head
|
data["head"] = head
|
||||||
data["body"] = body
|
data["body"] = body
|
||||||
if fins != None:
|
if fins is not None:
|
||||||
data["fins"] = fins
|
data["fins"] = fins
|
||||||
res = self.create("{0}/{1}".format(tppath, sid), data, remote=remote)
|
res = self.create("{0}/{1}".format(tppath, sid), data, remote=remote)
|
||||||
return [sid, res]
|
return [sid, res]
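`addc` creates the `catch/<node>/<peer>/` directories on demand, stamps the record with a zero-padded six-digit id, and folds the seperator/head/body/fins fields into the stored data. A hypothetical call, given an existing `Catch` instance, looks like:

```python
# Hypothetical usage sketch of Catch.addc; `catch` is an existing Daisy.Catch.Catch
# instance and every field value here is illustrative.
sid, record = catch.addc(
    "peer01",                    # peer the catch belongs to
    "node00",                    # node the peer is attached to
    ".",                         # seperator (spelling follows the source)
    "bbs",                       # head
    "piermesh",                  # body
    {"html": "<h1>hello</h1>"},  # payload data stored alongside the routing fields
    fins=["info"],
)
print(sid)  # e.g. "042117"
```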
|
||||||
|
@ -91,7 +106,7 @@ class Catch(Cache):
|
||||||
dirList = []
|
dirList = []
|
||||||
for k, v in self.data.items():
|
for k, v in self.data.items():
|
||||||
curCatch = {"remoteNode": onodeID}
|
curCatch = {"remoteNode": onodeID}
|
||||||
if type(v.msg) != str:
|
if type(v.msg) is not str:
|
||||||
curCatch = curCatch | v.msg
|
curCatch = curCatch | v.msg
|
||||||
del curCatch["html"]
|
del curCatch["html"]
|
||||||
dirList.append(curCatch)
|
dirList.append(curCatch)
|
||||||
|
|
|
@ -2,6 +2,9 @@ from Daisy.Daisy import Daisy
|
||||||
|
|
||||||
|
|
||||||
class Credential(Daisy):
|
class Credential(Daisy):
|
||||||
|
"""
|
||||||
|
Currently unused credential class; it will be fleshed out for credentialed access to the web UI
|
||||||
|
"""
|
||||||
def __init__(self, nodeNickname, credentialName, extension, daisyCryptography):
|
def __init__(self, nodeNickname, credentialName, extension, daisyCryptography):
|
||||||
fname = "data/{0}/{1}.{2}".format(nodeNickname, credentialName, extension)
|
fname = "data/{0}/{1}.{2}".format(nodeNickname, credentialName, extension)
|
||||||
super().__init__(
|
super().__init__(
|
||||||
|
|
|
@ -0,0 +1,6 @@
|
||||||
|
Credential
|
||||||
|
==========
|
||||||
|
|
||||||
|
.. autoclass:: Daisy.Credential.Credential
|
||||||
|
:members:
|
||||||
|
:undoc-members:
|
|
@ -1,4 +1,7 @@
|
||||||
|
# NOTE: Cryptography import
|
||||||
from Crypto.Cipher import AES
|
from Crypto.Cipher import AES
|
||||||
|
|
||||||
|
# NOTE: Generic imports
|
||||||
import traceback
|
import traceback
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
|
@ -7,9 +10,22 @@ logger = logging.getLogger("__main__." + __name__)
|
||||||
|
|
||||||
|
|
||||||
class SteelPetal:
|
class SteelPetal:
|
||||||
def __init__(self, key, nonce=None, testData=None):
|
"""
|
||||||
|
Cryptography utility for encrypting files
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, key: str, nonce=None, testData=None):
|
||||||
|
"""
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
key: str
|
||||||
|
User's plaintext key
|
||||||
|
|
||||||
|
nonce
|
||||||
|
Cryptographic artifact we can use to reinitialize cryptographic operations
|
||||||
|
"""
|
||||||
try:
|
try:
|
||||||
if nonce == None:
|
if nonce is None:
|
||||||
self.cipher = AES.new(self.pad(key).encode("utf-8"), AES.MODE_GCM)
|
self.cipher = AES.new(self.pad(key).encode("utf-8"), AES.MODE_GCM)
|
||||||
self.nonce = self.cipher.nonce
|
self.nonce = self.cipher.nonce
|
||||||
else:
|
else:
|
||||||
|
@ -17,30 +33,48 @@ class SteelPetal:
|
||||||
self.pad(key).encode("utf-8"), AES.MODE_GCM, nonce=nonce
|
self.pad(key).encode("utf-8"), AES.MODE_GCM, nonce=nonce
|
||||||
)
|
)
|
||||||
self.nonce = nonce
|
self.nonce = nonce
|
||||||
if testData != None:
|
if testData is not None:
|
||||||
try:
|
try:
|
||||||
self.cipher.decrypt(testData)
|
self.cipher.decrypt(testData)
|
||||||
except:
|
except Exception:
|
||||||
logger.log(30, traceback.format_exc())
|
logger.error(traceback.format_exc())
|
||||||
return False
|
return False
|
||||||
except:
|
except Exception:
|
||||||
logger.log(30, traceback.format_exc())
|
logger.error(traceback.format_exc())
|
||||||
|
|
||||||
def pad(self, key):
|
def pad(self, key: str):
|
||||||
|
"""
|
||||||
|
Pad the key to the AES block size so it can be used as an AES key
|
||||||
|
|
||||||
|
key: str
|
||||||
|
User's plain text key
|
||||||
|
"""
|
||||||
BS = AES.block_size
|
BS = AES.block_size
|
||||||
key = key + (BS - len(key) % BS) * chr(BS - len(key) % BS)
|
key = key + (BS - len(key) % BS) * chr(BS - len(key) % BS)
|
||||||
return key
|
return key
|
||||||
|
|
||||||
def encrypt(self, data):
|
def encrypt(self, data: bytes):
|
||||||
|
"""
|
||||||
|
Encrypt binary data
|
||||||
|
|
||||||
|
data: bytes
|
||||||
|
Data to encrypt
|
||||||
|
"""
|
||||||
try:
|
try:
|
||||||
return self.cipher.encrypt_and_digest(data)
|
return self.cipher.encrypt_and_digest(data)
|
||||||
except:
|
except Exception:
|
||||||
logger.log(20, traceback.format_exec())
|
logger.error(traceback.format_exc())
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def decrypt(self, data):
|
def decrypt(self, data: bytes):
|
||||||
|
"""
|
||||||
|
Decrypt encrypted binary data
|
||||||
|
|
||||||
|
data: bytes
|
||||||
|
Data to decrypt
|
||||||
|
"""
|
||||||
try:
|
try:
|
||||||
return self.cipher.decrypt(data)
|
return self.cipher.decrypt(data)
|
||||||
except:
|
except Exception:
|
||||||
logger.log(20, traceback.format_exec())
|
logger.error(traceback.format_exc())
|
||||||
return False
|
return False
|
||||||
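A minimal usage sketch of SteelPetal as defined above; the key and plaintext are illustrative. The nonce stored on the instance is what lets a later instance decrypt.

from Daisy.CryptographyUtil import SteelPetal

sp = SteelPetal("my plaintext key")           # key is padded to AES.block_size internally
ciphertext, tag = sp.encrypt(b"hello mesh")   # encrypt_and_digest returns (ciphertext, tag)

# A fresh instance built with the saved nonce can decrypt the data later
sp2 = SteelPetal("my plaintext key", nonce=sp.nonce)
plaintext = sp2.decrypt(ciphertext)           # b"hello mesh", or False on failure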
|
|
|
@ -0,0 +1,6 @@
|
||||||
|
CryptographyUtil
|
||||||
|
================
|
||||||
|
|
||||||
|
.. autoclass:: Daisy.CryptographyUtil.SteelPetal
|
||||||
|
:members:
|
||||||
|
:undoc-members:
|
|
@ -4,9 +4,6 @@ import msgpack
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
# TODO: delete
|
# TODO: delete
|
||||||
# TODO: propagate json changes to msgpack automatically
|
|
||||||
# TODO: propagate msgpack changes to cache automatically
|
|
||||||
# TODO: Indexing
|
|
||||||
|
|
||||||
logger = logging.getLogger("__main__." + __name__)
|
logger = logging.getLogger("__main__." + __name__)
|
||||||
|
|
||||||
|
@ -42,7 +39,6 @@ class Daisy:
|
||||||
In memory representation
|
In memory representation
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# TODO: Strong encrypt
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
filepath: str,
|
filepath: str,
|
||||||
|
@ -64,30 +60,31 @@ class Daisy:
|
||||||
template: bool
|
template: bool
|
||||||
Which template to use
|
Which template to use
|
||||||
|
|
||||||
prefillDict: bool
|
prefillDict: dict
|
||||||
Whether to fill the record with a template
|
Data to prefill record with
|
||||||
"""
|
"""
|
||||||
|
# TODO: Finish remote implementation
|
||||||
self.remote = False
|
self.remote = False
|
||||||
self.filepath = filepath
|
self.filepath = filepath
|
||||||
if remote != False:
|
if remote:
|
||||||
self.remote = True
|
self.remote = True
|
||||||
self.remoteNodeID = remote
|
self.remoteNodeID = remote
|
||||||
else:
|
else:
|
||||||
if os.path.exists(filepath) != True:
|
if not os.path.exists(filepath):
|
||||||
with open(filepath, "wb") as f:
|
with open(filepath, "wb") as f:
|
||||||
if template != False:
|
if template:
|
||||||
if template in templates.keys():
|
if template in templates.keys():
|
||||||
t = templates[template].get()
|
t = templates[template].get()
|
||||||
if prefillDict != False:
|
if prefillDict:
|
||||||
for k in prefillDict.keys():
|
for k in prefillDict.keys():
|
||||||
t[k] = prefillDict[k]
|
t[k] = prefillDict[k]
|
||||||
f.write(msgpack.dumps(t))
|
f.write(msgpack.dumps(t))
|
||||||
self.msg = t
|
self.msg = t
|
||||||
else:
|
else:
|
||||||
logger.log(20, "No such template as: " + template)
|
logger.error("No such template as: " + template)
|
||||||
else:
|
else:
|
||||||
t = {}
|
t = {}
|
||||||
if prefillDict != False:
|
if prefillDict:
|
||||||
for k in prefillDict.keys():
|
for k in prefillDict.keys():
|
||||||
t[k] = prefillDict[k]
|
t[k] = prefillDict[k]
|
||||||
f.write(msgpack.dumps(t))
|
f.write(msgpack.dumps(t))
|
||||||
|
@ -98,8 +95,6 @@ class Daisy:
|
||||||
with open(filepath, "rb") as f:
|
with open(filepath, "rb") as f:
|
||||||
self.msg = msgpack.loads(f.read())
|
self.msg = msgpack.loads(f.read())
|
||||||
|
|
||||||
# Use override for updating
|
|
||||||
|
|
||||||
def write(
|
def write(
|
||||||
self,
|
self,
|
||||||
override=False,
|
override=False,
|
||||||
|
@ -108,7 +103,7 @@ class Daisy:
|
||||||
recur: bool = False,
|
recur: bool = False,
|
||||||
):
|
):
|
||||||
"""
|
"""
|
||||||
Write record to disk
|
Write record to disk, note: use override with updated record to update record
|
||||||
|
|
||||||
Parameters
|
Parameters
|
||||||
----------
|
----------
|
||||||
|
@ -124,11 +119,11 @@ class Daisy:
|
||||||
recur: bool
|
recur: bool
|
||||||
Whether to recursively handle keys
|
Whether to recursively handle keys
|
||||||
"""
|
"""
|
||||||
if override != False:
|
if override:
|
||||||
for key in override.keys():
|
for key in override.keys():
|
||||||
# TODO: Deeper recursion
|
# TODO: Deeper recursion
|
||||||
if recur:
|
if recur:
|
||||||
if not key in self.msg.keys():
|
if key not in self.msg.keys():
|
||||||
self.msg[key] = {}
|
self.msg[key] = {}
|
||||||
for ikey in override[key].keys():
|
for ikey in override[key].keys():
|
||||||
self.msg[key][ikey] = override[key][ikey]
|
self.msg[key][ikey] = override[key][ikey]
|
||||||
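A small sketch of the override/recur behaviour above, assuming an existing Daisy record instance named `record`; the keys are illustrative.

# Hypothetical sketch: `record` is assumed to be an existing Daisy.Daisy.Daisy instance.
# With recur=True the nested keys are merged into record.msg instead of replacing the
# whole top-level value, and the record is then written back to disk.
record.write(override={"config": {"theme": "dark"}}, recur=True)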
|
|
|
@ -2,15 +2,37 @@ from Daisy.Daisy import Daisy
|
||||||
|
|
||||||
|
|
||||||
class Index(Daisy):
|
class Index(Daisy):
|
||||||
|
"""
|
||||||
|
A searchable index of records. This is currently only half implemented,
|
||||||
|
but works well enough to hold our remote catch index
|
||||||
|
"""
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
nodeNickname,
|
nodeNickname: str,
|
||||||
daisyCryptography,
|
daisyCryptography,
|
||||||
prefill=[],
|
prefill: list = [],
|
||||||
indexedFields=[],
|
indexedFields: list = [],
|
||||||
autoIndex=True,
|
autoIndex: bool = True,
|
||||||
):
|
):
|
||||||
# TODO: Load from disk
|
"""
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
nodeNickname: str
|
||||||
|
Node nickname for record storage
|
||||||
|
|
||||||
|
daisyCryptography: Daisy.CryptographyUtil.SteelPetal
|
||||||
|
Record cryptography reference
|
||||||
|
|
||||||
|
prefill: list[dict]
|
||||||
|
List of objects to prefill the index with
|
||||||
|
|
||||||
|
indexedFields: list[str]
|
||||||
|
List of fields to index (what not to drop from a full record)
|
||||||
|
|
||||||
|
autoIndex: bool
|
||||||
|
Whether to automatically build the list of indexed fields present in the prefill objects
|
||||||
|
"""
|
||||||
if autoIndex:
|
if autoIndex:
|
||||||
if prefill != []:
|
if prefill != []:
|
||||||
if indexedFields == []:
|
if indexedFields == []:
|
||||||
|
@ -21,29 +43,36 @@ class Index(Daisy):
|
||||||
indexedFields.append(k)
|
indexedFields.append(k)
|
||||||
indexedFields = list(set(indexedFields))
|
indexedFields = list(set(indexedFields))
|
||||||
super().__init__(
|
super().__init__(
|
||||||
nodeNickname + ".index",
|
f"{nodeNickname}/daisy/{nodeNickname}.index",
|
||||||
daisyCryptography,
|
daisyCryptography,
|
||||||
prefillDict={"_index": prefill, "_fields": indexedFields},
|
prefillDict={"_index": prefill, "_fields": indexedFields},
|
||||||
)
|
)
|
||||||
|
|
||||||
def addEntry(self, entry):
|
def addEntry(self, entry: dict):
|
||||||
|
"""
|
||||||
|
Add a record to the index
|
||||||
|
|
||||||
|
entry: dict
|
||||||
|
Record to add to the index
|
||||||
|
"""
|
||||||
|
# TODO: Filter entry for only indexed fields
|
||||||
index = self.msg["_index"]
|
index = self.msg["_index"]
|
||||||
index.append(entry)
|
index.append(entry)
|
||||||
self.write(override={"_index": index})
|
self.write(override={"_index": index})
|
||||||
|
|
||||||
def search(self, keydict: dict, strict: bool = True):
|
def search(self, keydict: dict, strict: bool = True):
|
||||||
"""
|
"""
|
||||||
Search cache for record for records with values
|
Search index for records with matching values
|
||||||
|
|
||||||
keydict: dict
|
keydict: dict
|
||||||
Values to search for
|
Keys/Values to search for
|
||||||
|
|
||||||
strict: bool
|
strict: bool
|
||||||
Whether to require values match
|
Whether to require all keys/values match
|
||||||
"""
|
"""
|
||||||
results = []
|
results = []
|
||||||
for ob in self.msg["_index"]:
|
for ob in self.msg["_index"]:
|
||||||
if strict and type(ob) != str:
|
if strict and type(ob) is not str:
|
||||||
addcheck = False
|
addcheck = False
|
||||||
for k, v in keydict.items():
|
for k, v in keydict.items():
|
||||||
if k in ob.keys():
|
if k in ob.keys():
|
||||||
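A rough sketch of how the index is used, assuming `daisyCryptography` is an existing SteelPetal instance and the node's daisy directory already exists; the field values are illustrative.

from Daisy.Index import Index

idx = Index("node0", daisyCryptography, indexedFields=["head", "body", "remoteNode"])
idx.addEntry({"head": "fish", "body": "splashes", "remoteNode": "000042"})

# strict=True requires every key/value pair in keydict to match
hits = idx.search({"head": "fish", "body": "splashes"}, strict=True)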
|
|
|
@ -0,0 +1,6 @@
|
||||||
|
Index
|
||||||
|
=====
|
||||||
|
|
||||||
|
.. autoclass:: Daisy.Index.Index
|
||||||
|
:members:
|
||||||
|
:undoc-members:
|
|
@ -2,5 +2,15 @@ from Daisy.Daisy import Daisy
|
||||||
|
|
||||||
|
|
||||||
class Ref(Daisy):
|
class Ref(Daisy):
|
||||||
def __init__(self, path, remoteNodeID):
|
"""
|
||||||
super().__init__(path, remote=remoteNodeID)
|
Reference to a remote record
|
||||||
|
|
||||||
|
metadata: dict
|
||||||
|
Data to fill the record with; should only be metadata
|
||||||
|
|
||||||
|
path: str
|
||||||
|
Where to store data locally
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, metadata: dict, path: str, remoteNodeID: str):
|
||||||
|
super().__init__(path, remote=remoteNodeID, prefillDict=metadata)
|
||||||
|
|
|
@ -0,0 +1,6 @@
|
||||||
|
Ref
|
||||||
|
===
|
||||||
|
|
||||||
|
.. autoclass:: Daisy.Ref.Ref
|
||||||
|
:members:
|
||||||
|
:undoc-members:
|
|
@ -1,49 +0,0 @@
|
||||||
from watchdog.observers import Observer
|
|
||||||
from watchdog.events import FileSystemEventHandler
|
|
||||||
|
|
||||||
global garden
|
|
||||||
"""
|
|
||||||
Map of instances to list of signals
|
|
||||||
to be processed
|
|
||||||
"""
|
|
||||||
garden = {}
|
|
||||||
|
|
||||||
|
|
||||||
class Compound(FileSystemEventHandler):
|
|
||||||
"""
|
|
||||||
File system watcher to propagate disk changes
|
|
||||||
|
|
||||||
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Daisy/Soil.py>`__
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, cache, isCatch: bool = False):
|
|
||||||
"""
|
|
||||||
Parameters
|
|
||||||
----------
|
|
||||||
cache: Cache
|
|
||||||
Daisy cache to update
|
|
||||||
|
|
||||||
isCatch: bool
|
|
||||||
Is the cache for catchs
|
|
||||||
"""
|
|
||||||
self.cache = cache
|
|
||||||
self.isCatch = isCatch
|
|
||||||
super().__init__()
|
|
||||||
|
|
||||||
def on_any_event(self, event):
|
|
||||||
"""
|
|
||||||
Called when a CRUD operation is performed on a record file
|
|
||||||
|
|
||||||
Parameters
|
|
||||||
----------
|
|
||||||
event
|
|
||||||
Event object provided by watchdog
|
|
||||||
"""
|
|
||||||
if not (".json" in event.src_path):
|
|
||||||
if not (".md" in event.src_path):
|
|
||||||
tpath = "/".join(event.src_path.split("/")[1:])
|
|
||||||
if tpath != "":
|
|
||||||
if self.isCatch:
|
|
||||||
self.cache.sget(tpath)
|
|
||||||
else:
|
|
||||||
self.cache.get(tpath).get()
|
|
|
@ -1,6 +0,0 @@
|
||||||
Soil: Daisy signal management
|
|
||||||
=============================
|
|
||||||
|
|
||||||
.. autoclass:: Daisy.Soil.Compound
|
|
||||||
:members:
|
|
||||||
:undoc-members:
|
|
|
@ -6,28 +6,71 @@ import traceback
|
||||||
|
|
||||||
logger = logging.getLogger("__main__." + __name__)
|
logger = logging.getLogger("__main__." + __name__)
|
||||||
|
|
||||||
# TODO: Higher priority erros
|
|
||||||
|
|
||||||
class Store(Daisy):
|
class Store(Daisy):
|
||||||
"""
|
"""
|
||||||
Key value store
|
Key value store
|
||||||
|
|
||||||
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Daisy/Store.py>`__
|
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Daisy/Store.py>`_
|
||||||
"""
|
|
||||||
|
|
||||||
|
Attributes
|
||||||
|
----------
|
||||||
|
|
||||||
|
ephemeral: dict
|
||||||
|
Memory only records
|
||||||
|
"""
|
||||||
def __init__(self, store: str, path: str, nodeNickname: str, daisyCryptography):
|
def __init__(self, store: str, path: str, nodeNickname: str, daisyCryptography):
|
||||||
fpath = "daisy/{0}/{1}".format(path, nodeNickname)
|
"""
|
||||||
cpath = "{0}/{1}/{2}".format(path, nodeNickname, store)
|
Parameters
|
||||||
|
----------
|
||||||
|
store: str
|
||||||
|
Store name/filename
|
||||||
|
|
||||||
|
path: str
|
||||||
|
Folder record should be in
|
||||||
|
|
||||||
|
nodeNickname: str
|
||||||
|
Node nickname for record storage
|
||||||
|
|
||||||
|
daisyCryptography: Daisy.CryptographyUtil.SteelPetal
|
||||||
|
Record cryptography reference
|
||||||
|
"""
|
||||||
|
fpath = f"{nodeNickname}/daisy/{path}"
|
||||||
|
cpath = f"{fpath}/{store}"
|
||||||
if not os.path.exists(fpath):
|
if not os.path.exists(fpath):
|
||||||
os.mkdir(fpath)
|
os.mkdir(fpath)
|
||||||
super().__init__("daisy/" + cpath, daisyCryptography)
|
super().__init__(cpath, daisyCryptography)
|
||||||
self.ephemeral = {}
|
self.ephemeral = {}
|
||||||
|
|
||||||
def createEmpty(self, key):
|
def createEmpty(self, key: str):
|
||||||
|
"""
|
||||||
|
Create empty record at the given key
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
key: str
|
||||||
|
Key to create empty record at
|
||||||
|
"""
|
||||||
self.msg[key] = {}
|
self.msg[key] = {}
|
||||||
|
|
||||||
# TODO: Update usages of update where necessary to keep out of mem
|
|
||||||
def update(self, entry: str, data, recur: bool = True, write=True):
|
def update(self, entry: str, data, recur: bool = True, write=True):
|
||||||
|
"""
|
||||||
|
Update given record
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
entry: str
|
||||||
|
Key to update record of
|
||||||
|
|
||||||
|
data
|
||||||
|
Data to update record with
|
||||||
|
|
||||||
|
recur: bool
|
||||||
|
Whether to iterate over data
|
||||||
|
|
||||||
|
write: bool
|
||||||
|
Whether record is ephemeral
|
||||||
|
"""
|
||||||
if write:
|
if write:
|
||||||
if recur:
|
if recur:
|
||||||
if entry not in self.msg.keys():
|
if entry not in self.msg.keys():
|
||||||
|
@ -47,19 +90,29 @@ class Store(Daisy):
|
||||||
self.ephemeral[entry] = data
|
self.ephemeral[entry] = data
|
||||||
|
|
||||||
def getRecord(self, key: str, ephemeral=False):
|
def getRecord(self, key: str, ephemeral=False):
|
||||||
logger.log(30, key)
|
"""
|
||||||
|
Get record at key
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
key: str
|
||||||
|
|
||||||
|
ephemeral: bool
|
||||||
|
Whether the key is only in memory; currently used for session cryptography credentials
|
||||||
|
"""
|
||||||
|
logger.debug(key)
|
||||||
try:
|
try:
|
||||||
if ephemeral:
|
if ephemeral:
|
||||||
if key in self.ephemeral.keys():
|
if key in self.ephemeral.keys():
|
||||||
return self.ephemeral[key]
|
return self.ephemeral[key]
|
||||||
else:
|
else:
|
||||||
logger.log(20, "Record does not exist")
|
logger.info("Record does not exist")
|
||||||
return False
|
return False
|
||||||
else:
|
else:
|
||||||
if key in self.get().keys():
|
if key in self.get().keys():
|
||||||
return self.get()[key]
|
return self.get()[key]
|
||||||
else:
|
else:
|
||||||
logger.log(20, "Record does not exist")
|
logger.info("Record does not exist")
|
||||||
return False
|
return False
|
||||||
except Exception:
|
except Exception:
|
||||||
logger.log(30, traceback.format_exc())
|
logger.warning(traceback.format_exc())
|
||||||
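A minimal sketch of the store above, assuming `daisyCryptography` is an existing SteelPetal instance and the node's data directory layout already exists; store and key names are illustrative.

from Daisy.Store import Store

store = Store("cryptography", "daisy", "node0", daisyCryptography)

# Persisted record
store.update("key", {"publicKey": "..."})
record = store.getRecord("key")

# Memory-only record (e.g. session credentials), never written to disk
store.update("session", {"ephemeralKey": "..."}, write=False)
session = store.getRecord("session", ephemeral=True)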
|
|
|
@ -30,7 +30,7 @@ class Header(Packet):
|
||||||
Whether a response should be sent when the message completes reception (TODO)
|
Whether a response should be sent when the message completes reception (TODO)
|
||||||
|
|
||||||
pAction: int
|
pAction: int
|
||||||
3 digit (maximum) pAction ID for mapping precise actions within a protocol (TODO)
|
3 digit (maximum) pAction ID for mapping precise actions within a protocol
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
|
@ -42,12 +42,23 @@ class Header(Packet):
|
||||||
sourceNode: int,
|
sourceNode: int,
|
||||||
recipient: int,
|
recipient: int,
|
||||||
recipientNode: int,
|
recipientNode: int,
|
||||||
subpacket: bool = False,
|
|
||||||
wantFullResponse: bool = False,
|
wantFullResponse: bool = False,
|
||||||
packetsClass: int = 0,
|
packetsClass: int = 0,
|
||||||
pAction: int = -1,
|
pAction: int = -1,
|
||||||
target=True,
|
target=True,
|
||||||
):
|
):
|
||||||
|
"""
|
||||||
|
Arguments
|
||||||
|
---------
|
||||||
|
sourceNode: int
|
||||||
|
Source of request
|
||||||
|
|
||||||
|
packetsClass: int
|
||||||
|
Integer ID matching the class of the message
|
||||||
|
|
||||||
|
target
|
||||||
|
Whether the message is being sent to a target, if so, where
|
||||||
|
"""
|
||||||
super().__init__(
|
super().__init__(
|
||||||
b"", packetsID=packetsID, packetCount=packetCount, packetsClass=packetsClass
|
b"", packetsID=packetsID, packetCount=packetCount, packetsClass=packetsClass
|
||||||
)
|
)
|
||||||
|
@ -60,7 +71,6 @@ class Header(Packet):
|
||||||
else:
|
else:
|
||||||
self.recipient = -1
|
self.recipient = -1
|
||||||
self.recipientNode = -1
|
self.recipientNode = -1
|
||||||
# TODO: Populating with submessage ids
|
|
||||||
self.submessages = []
|
self.submessages = []
|
||||||
self.wantFullResponse = wantFullResponse
|
self.wantFullResponse = wantFullResponse
|
||||||
self.pAction = pAction
|
self.pAction = pAction
|
||||||
|
@ -69,7 +79,7 @@ class Header(Packet):
|
||||||
|
|
||||||
def usePreset(self, path: str, daisyCryptography):
|
def usePreset(self, path: str, daisyCryptography):
|
||||||
"""
|
"""
|
||||||
Add preset fields to the packet
|
Add preset fields to the packet, currently unused
|
||||||
"""
|
"""
|
||||||
preset = Daisy(path, daisyCryptography)
|
preset = Daisy(path, daisyCryptography)
|
||||||
for key in preset.get().keys():
|
for key in preset.get().keys():
|
||||||
|
|
|
@ -44,7 +44,6 @@ class Message:
|
||||||
primaryMessage=None,
|
primaryMessage=None,
|
||||||
pskEncrypt=False
|
pskEncrypt=False
|
||||||
):
|
):
|
||||||
# TODO: PSK for usage prior to credentials
|
|
||||||
"""
|
"""
|
||||||
Parameters
|
Parameters
|
||||||
----------
|
----------
|
||||||
|
@ -57,20 +56,41 @@ class Message:
|
||||||
senderDisplayName: int
|
senderDisplayName: int
|
||||||
3 digit (maximum) ID for mapping display names to a given user
|
3 digit (maximum) ID for mapping display names to a given user
|
||||||
|
|
||||||
|
sourceNode: int
|
||||||
|
Source of request
|
||||||
|
|
||||||
recipient: int
|
recipient: int
|
||||||
6 digit (maximum) node or peer ID
|
6 digit (maximum) node or peer ID
|
||||||
|
|
||||||
recipientNode: int
|
recipientNode: int
|
||||||
6 digit (maximum) node ID to route the packet to
|
6 digit (maximum) node ID to route the packet to
|
||||||
|
|
||||||
|
cryptographyInfo: Cryptography.WhaleSong.Transport
|
||||||
|
Cryptography instance for encrypting message
|
||||||
|
|
||||||
|
packetsClass: int
|
||||||
|
Which protocol the packets are using
|
||||||
|
|
||||||
|
pAction: int
|
||||||
|
3 digit (maximum) pAction ID for mapping precise actions within a protocol
|
||||||
|
|
||||||
dataSize: int
|
dataSize: int
|
||||||
Size to cut the bytesObject into per packet
|
Size to cut the bytesObject into per packet
|
||||||
|
|
||||||
wantFullResponse: bool
|
wantFullResponse: bool
|
||||||
Whether to send a response when the message has completed reception (TODO: Kill all retries for associated packets when received)
|
Whether to send a response when the message has completed reception (TODO: Kill all retries for associated packets when received)
|
||||||
|
|
||||||
packetsClass: int
|
target
|
||||||
Which protocol the packets are using
|
Whether the message is being sent to a target, if so, where
|
||||||
|
|
||||||
|
subMessage: bool
|
||||||
|
Whether this is a submessage
|
||||||
|
|
||||||
|
primaryMessage
|
||||||
|
Primary message this is a submessage to, if this is a submessage
|
||||||
|
|
||||||
|
pskEncrypt: bool
|
||||||
|
Whether to encrypt the message with the pre shared key
|
||||||
"""
|
"""
|
||||||
self.recipientNode = recipientNode
|
self.recipientNode = recipientNode
|
||||||
self.target = target
|
self.target = target
|
||||||
|
@ -90,28 +110,25 @@ class Message:
|
||||||
)
|
)
|
||||||
self.packets = packets
|
self.packets = packets
|
||||||
else:
|
else:
|
||||||
# Data passed in by peers should already have been e2ee encrypted by SubtleCrypto
|
# TODO: Data passed in by peers should already have been e2ee encrypted by SubtleCrypto
|
||||||
# Transport encryption
|
|
||||||
# bytesObject = lzma.compress(bytesObject, str(recipientNode).zfill(6), isDict=False)
|
|
||||||
if subMessage == False:
|
if subMessage == False:
|
||||||
bytesObject, nonce, tag = cryptographyInfo.encrypt(
|
bytesObject, nonce, tag = cryptographyInfo.encrypt(
|
||||||
bytesObject, str(recipientNode).zfill(6), isDict=False, pskEncrypt=pskEncrypt
|
bytesObject, str(recipientNode).zfill(6), isDict=False, pskEncrypt=pskEncrypt
|
||||||
)
|
)
|
||||||
logger.log(10, bytesObject)
|
# logger.debug(bytesObject)
|
||||||
self.nonce = nonce
|
self.nonce = nonce
|
||||||
self.tag = tag
|
self.tag = tag
|
||||||
packets = []
|
packets = []
|
||||||
self.packetsID = random.randrange(0, 999999)
|
self.packetsID = random.randrange(0, 999999)
|
||||||
pnum = 1
|
pnum = 1
|
||||||
# if subMessage:
|
|
||||||
dataSize = 80
|
dataSize = 80
|
||||||
blen = math.ceil(len(bytesObject) / dataSize)
|
blen = math.ceil(len(bytesObject) / dataSize)
|
||||||
tb = b""
|
tb = b""
|
||||||
for it in range(blen):
|
for it in range(blen):
|
||||||
if it >= (blen - 1):
|
if it >= (blen - 1):
|
||||||
b = bytesObject[it * dataSize :]
|
b = bytesObject[it * dataSize:]
|
||||||
else:
|
else:
|
||||||
b = bytesObject[it * dataSize : (it * dataSize + dataSize)]
|
b = bytesObject[it * dataSize: (it * dataSize + dataSize)]
|
||||||
if subMessage:
|
if subMessage:
|
||||||
packets.append(
|
packets.append(
|
||||||
p.Packet(
|
p.Packet(
|
||||||
|
@ -152,7 +169,7 @@ class Message:
|
||||||
|
|
||||||
if subMessage:
|
if subMessage:
|
||||||
pnum -= 1
|
pnum -= 1
|
||||||
|
|
||||||
for it in range(pnum):
|
for it in range(pnum):
|
||||||
packet = msgpack.loads(packets[it].dump())
|
packet = msgpack.loads(packets[it].dump())
|
||||||
packet["packetCount"] = pnum
|
packet["packetCount"] = pnum
|
||||||
|
@ -168,31 +185,45 @@ class Message:
|
||||||
|
|
||||||
def reassemble(self, completedMessage: dict, cryptographyInfo, subMessage=False, yctx=None, packetCount=None):
|
def reassemble(self, completedMessage: dict, cryptographyInfo, subMessage=False, yctx=None, packetCount=None):
|
||||||
"""
|
"""
|
||||||
Reassemble packets from a completed message in `Sponge.base`
|
Reassemble packets from a completed message in `Sponge.base`, meant to be used without instantiation
|
||||||
|
|
||||||
|
Arguments
|
||||||
|
---------
|
||||||
|
completedMessage: dict
|
||||||
|
All parts of the message and submessage
|
||||||
|
|
||||||
|
cryptographyInfo: Cryptography.WhaleSong.Transport
|
||||||
|
Cryptography instance for encrypting message
|
||||||
|
|
||||||
|
subMessage: bool
|
||||||
|
Whether this is a submessage
|
||||||
|
|
||||||
|
yctx
|
||||||
|
Message parsing context
|
||||||
|
|
||||||
|
packetCount
|
||||||
|
Number of packets
|
||||||
"""
|
"""
|
||||||
data = b""
|
data = b""
|
||||||
sourceNode = None
|
sourceNode = None
|
||||||
# TODO: Fix reassembly for primary
|
|
||||||
if subMessage:
|
if subMessage:
|
||||||
sourceNode = yctx["sourceNode"]["val"]
|
sourceNode = yctx["sourceNode"]["val"]
|
||||||
for it in range(1, packetCount+1):
|
for it in range(1, packetCount+1):
|
||||||
data += completedMessage["data"][completedMessage["dataOrder"].index(it)]
|
data += completedMessage["data"][completedMessage["dataOrder"].index(it)]
|
||||||
data = msgpack.loads(lzma.decompress(data))
|
data = msgpack.loads(lzma.decompress(data))
|
||||||
logger.log(10, data)
|
# logger.debug(data)
|
||||||
logger.log(10, completedMessage["data"])
|
# logger.debug(completedMessage["data"])
|
||||||
logger.log(10, completedMessage["dataOrder"])
|
# logger.debug(completedMessage["dataOrder"])
|
||||||
else:
|
else:
|
||||||
# TODO: Cryptography setup
|
|
||||||
packetCount = int(completedMessage.yctx["packetCount"]["val"])
|
packetCount = int(completedMessage.yctx["packetCount"]["val"])
|
||||||
sourceNode = completedMessage.yctx["sourceNode"]["val"]
|
sourceNode = completedMessage.yctx["sourceNode"]["val"]
|
||||||
logger.log(10, completedMessage.data)
|
# logger.debug(completedMessage.data)
|
||||||
for it in range(1, packetCount):
|
for it in range(1, packetCount):
|
||||||
if it in completedMessage.dataOrder:
|
if it in completedMessage.dataOrder:
|
||||||
data += completedMessage.data[completedMessage.dataOrder.index(it)]
|
data += completedMessage.data[completedMessage.dataOrder.index(it)]
|
||||||
logger.log(10, "pre decrypt")
|
# logger.debug("pre decrypt")
|
||||||
logger.log(10, data)
|
# logger.debug(data)
|
||||||
data = cryptographyInfo.decrypt(
|
data = cryptographyInfo.decrypt(
|
||||||
data, sourceNode, completedMessage.nonce, completedMessage.tag
|
data, sourceNode, completedMessage.nonce, completedMessage.tag
|
||||||
)
|
)
|
||||||
# data = msgpack.loads(lzma.decompress(data))
|
|
||||||
return data
|
return data
|
||||||
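A standalone sketch of the chunking arithmetic used when the message is split into packets above (dataSize is forced to 80 in the current code). It mirrors the slicing loop without the Packet wrappers; joining the chunks in order restores the payload, which is what reassemble does via dataOrder.

import math

payload = bytes(200)   # illustrative encrypted payload
dataSize = 80
blen = math.ceil(len(payload) / dataSize)

chunks = []
for it in range(blen):
    if it >= (blen - 1):
        chunks.append(payload[it * dataSize:])                            # last, possibly short chunk
    else:
        chunks.append(payload[it * dataSize:(it * dataSize + dataSize)])  # full-size chunk

assert b"".join(chunks) == payload   # in-order reassembly restores the original bytes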
|
|
|
@ -3,6 +3,10 @@ import Packets.Message
|
||||||
|
|
||||||
|
|
||||||
class Bubble(Message):
|
class Bubble(Message):
|
||||||
|
"""
|
||||||
|
Send data from peer to peer
|
||||||
|
"""
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
sender,
|
sender,
|
||||||
|
@ -13,6 +17,30 @@ class Bubble(Message):
|
||||||
cryptographyInfo,
|
cryptographyInfo,
|
||||||
data,
|
data,
|
||||||
):
|
):
|
||||||
|
"""
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
sender
|
||||||
|
6 digit (maximum) node or peer ID
|
||||||
|
|
||||||
|
senderID
|
||||||
|
3 digit (maximum) ID for mapping display names to a given user
|
||||||
|
|
||||||
|
sourceNode: int
|
||||||
|
Source of request
|
||||||
|
|
||||||
|
recipient
|
||||||
|
6 digit (maximum) recipient peer ID
|
||||||
|
|
||||||
|
recipientNode
|
||||||
|
6 digit (maximum) recipient node ID
|
||||||
|
|
||||||
|
cryptographyInfo: Cryptography.WhaleSong.Transport
|
||||||
|
Cryptography instance for encrypting message
|
||||||
|
|
||||||
|
data
|
||||||
|
Data to send to peer
|
||||||
|
"""
|
||||||
bytesOb = Packets.Message.dict2bytes({"data": data, "recipient": recipient, "target": "bubble"})
|
bytesOb = Packets.Message.dict2bytes({"data": data, "recipient": recipient, "target": "bubble"})
|
||||||
super().__init__(
|
super().__init__(
|
||||||
bytesOb,
|
bytesOb,
|
||||||
|
|
|
@ -0,0 +1,6 @@
|
||||||
|
bubble.Bubble
|
||||||
|
=============
|
||||||
|
|
||||||
|
.. autoclass:: Packets.Messages.Protocols.bubble.Bubble.Bubble
|
||||||
|
:members:
|
||||||
|
:undoc-members:
|
|
@ -3,6 +3,9 @@ import Packets.Message
|
||||||
|
|
||||||
|
|
||||||
class IndexSync(Message):
|
class IndexSync(Message):
|
||||||
|
"""
|
||||||
|
Sync indices of Catches across nodes
|
||||||
|
"""
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
sender,
|
sender,
|
||||||
|
@ -14,6 +17,33 @@ class IndexSync(Message):
|
||||||
index,
|
index,
|
||||||
target=False
|
target=False
|
||||||
):
|
):
|
||||||
|
"""
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
sender
|
||||||
|
6 digit (maximum) node or peer ID
|
||||||
|
|
||||||
|
senderID
|
||||||
|
3 digit (maximum) ID for mapping display names to a given user
|
||||||
|
|
||||||
|
sourceNode: int
|
||||||
|
Source of request
|
||||||
|
|
||||||
|
recipient
|
||||||
|
6 digit (maximum) recipient peer ID
|
||||||
|
|
||||||
|
recipientNode
|
||||||
|
6 digit (maximum) recipient node ID
|
||||||
|
|
||||||
|
cryptographyInfo: Cryptography.WhaleSong.Transport
|
||||||
|
Cryptography instance for encrypting message
|
||||||
|
|
||||||
|
index: dict
|
||||||
|
Index of Catches to sync across nodes
|
||||||
|
|
||||||
|
target: bool
|
||||||
|
Whether to send this to a specific target (str) or just broadcast (False)
|
||||||
|
"""
|
||||||
bytesOb = Packets.Message.dict2bytes({"index": index})
|
bytesOb = Packets.Message.dict2bytes({"index": index})
|
||||||
super().__init__(
|
super().__init__(
|
||||||
bytesOb,
|
bytesOb,
|
||||||
|
|
|
@ -0,0 +1,6 @@
|
||||||
|
catch.IndexSync
|
||||||
|
===============
|
||||||
|
|
||||||
|
.. autoclass:: Packets.Messages.Protocols.catch.IndexSync.IndexSync
|
||||||
|
:members:
|
||||||
|
:undoc-members:
|
|
@ -3,6 +3,10 @@ import Packets.Message
|
||||||
|
|
||||||
|
|
||||||
class CatchRequest(Message):
|
class CatchRequest(Message):
|
||||||
|
"""
|
||||||
|
Request Catch (website) from another node
|
||||||
|
"""
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
sender,
|
sender,
|
||||||
|
@ -16,6 +20,39 @@ class CatchRequest(Message):
|
||||||
fins,
|
fins,
|
||||||
pskEncrypt=False
|
pskEncrypt=False
|
||||||
):
|
):
|
||||||
|
"""
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
sender
|
||||||
|
6 digit (maximum) node or peer ID
|
||||||
|
|
||||||
|
senderID
|
||||||
|
3 digit (maximum) ID for mapping display names to a given user
|
||||||
|
|
||||||
|
sourceNode: int
|
||||||
|
Source of request
|
||||||
|
|
||||||
|
recipient
|
||||||
|
6 digit (maximum) recipient peer ID
|
||||||
|
|
||||||
|
recipientNode
|
||||||
|
6 digit (maximum) recipient node ID
|
||||||
|
|
||||||
|
cryptographyInfo: Cryptography.WhaleSong.Transport
|
||||||
|
Cryptography instance for encrypting message
|
||||||
|
|
||||||
|
head: str
|
||||||
|
First part of the Catch (4 characters max)
|
||||||
|
|
||||||
|
body: str
|
||||||
|
Second part of the Catch (8 characters max)
|
||||||
|
|
||||||
|
fins: list[str]
|
||||||
|
Last part of the Catch (6 characters max each)
|
||||||
|
|
||||||
|
pskEncrypt: bool
|
||||||
|
Whether to encrypt with PSK
|
||||||
|
"""
|
||||||
bytesOb = Packets.Message.dict2bytes({"head": head, "body": body, "fins": fins, "recipient": sender, "recipientNode": sourceNode})
|
bytesOb = Packets.Message.dict2bytes({"head": head, "body": body, "fins": fins, "recipient": sender, "recipientNode": sourceNode})
|
||||||
super().__init__(
|
super().__init__(
|
||||||
bytesOb,
|
bytesOb,
|
||||||
|
|
|
@ -0,0 +1,6 @@
|
||||||
|
catch.Request
|
||||||
|
=============
|
||||||
|
|
||||||
|
.. autoclass:: Packets.Messages.Protocols.catch.Request.CatchRequest
|
||||||
|
:members:
|
||||||
|
:undoc-members:
|
|
@ -3,6 +3,10 @@ import Packets.Message
|
||||||
|
|
||||||
|
|
||||||
class CatchResponse(Message):
|
class CatchResponse(Message):
|
||||||
|
"""
|
||||||
|
Send local Catch (website) to user who requested it
|
||||||
|
"""
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
sender,
|
sender,
|
||||||
|
@ -14,6 +18,33 @@ class CatchResponse(Message):
|
||||||
html,
|
html,
|
||||||
pskEncrypt=False
|
pskEncrypt=False
|
||||||
):
|
):
|
||||||
|
"""
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
sender
|
||||||
|
6 digit (maximum) node or peer ID
|
||||||
|
|
||||||
|
senderID
|
||||||
|
3 digit (maximum) ID for mapping display names to a given user
|
||||||
|
|
||||||
|
sourceNode: int
|
||||||
|
Source of request
|
||||||
|
|
||||||
|
recipient
|
||||||
|
6 digit (maximum) recipient peer ID
|
||||||
|
|
||||||
|
recipientNode
|
||||||
|
6 digit (maximum) recipient node ID
|
||||||
|
|
||||||
|
cryptographyInfo: Cryptography.WhaleSong.Transport
|
||||||
|
Cryptography instance for encrypting message
|
||||||
|
|
||||||
|
html
|
||||||
|
Contents of Catch to send back
|
||||||
|
|
||||||
|
pskEncrypt: bool
|
||||||
|
Whether to encrypt with PSK
|
||||||
|
"""
|
||||||
bytesOb = Packets.Message.dict2bytes({"html": html, "recipient": recipient, "target": "catch"})
|
bytesOb = Packets.Message.dict2bytes({"html": html, "recipient": recipient, "target": "catch"})
|
||||||
super().__init__(
|
super().__init__(
|
||||||
bytesOb,
|
bytesOb,
|
||||||
|
|
|
@ -0,0 +1,6 @@
|
||||||
|
catch.Response
|
||||||
|
==============
|
||||||
|
|
||||||
|
.. autoclass:: Packets.Messages.Protocols.catch.Response.CatchResponse
|
||||||
|
:members:
|
||||||
|
:undoc-members:
|
|
@ -1,20 +1,45 @@
|
||||||
from Packets.Message import Message
|
from Packets.Message import Message
|
||||||
import Packets.Message
|
import Packets.Message
|
||||||
|
|
||||||
# TODO: Send with psk encryption
|
|
||||||
|
|
||||||
class Handshake(Message):
|
class Handshake(Message):
|
||||||
|
"""
|
||||||
|
Provides the ephemeral key for session encryption
|
||||||
|
"""
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self, sender, senderID, recipient, recipientNode, cryptographyInfo, onodeID, sourceNode
|
self, sender, senderID, recipient, recipientNode, cryptographyInfo, onodeID, sourceNode
|
||||||
):
|
):
|
||||||
publicKey = None
|
"""
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
sender
|
||||||
|
6 digit (maximum) node or peer ID
|
||||||
|
|
||||||
|
senderID
|
||||||
|
3 digit (maximum) ID for mapping display names to a given user
|
||||||
|
|
||||||
|
sourceNode: int
|
||||||
|
Source of request
|
||||||
|
|
||||||
|
recipient
|
||||||
|
6 digit (maximum) recipient peer ID
|
||||||
|
|
||||||
|
recipientNode
|
||||||
|
6 digit (maximum) recipient node ID
|
||||||
|
|
||||||
|
cryptographyInfo: Cryptography.WhaleSong.Transport
|
||||||
|
Cryptography instance for encrypting message
|
||||||
|
|
||||||
|
onodeID
|
||||||
|
Node identifier for current node
|
||||||
|
|
||||||
|
sourceNode
|
||||||
|
Source of request
|
||||||
|
"""
|
||||||
ephemeralKey = None
|
ephemeralKey = None
|
||||||
record = cryptographyInfo.getRecord("key", "self")
|
record = cryptographyInfo.getRecord("key", "self")
|
||||||
if record != False:
|
if record:
|
||||||
if "publicKey" in record.keys():
|
|
||||||
publicKey = record["publicKey"]
|
|
||||||
else:
|
|
||||||
raise Exception("Public key missing for node")
|
|
||||||
if "ourEphemeralKey" in record.keys():
|
if "ourEphemeralKey" in record.keys():
|
||||||
ephemeralKey = record["ourEphemeralKey"]
|
ephemeralKey = record["ourEphemeralKey"]
|
||||||
else:
|
else:
|
||||||
|
|
|
@ -0,0 +1,6 @@
|
||||||
|
cryptography.Handshake
|
||||||
|
======================
|
||||||
|
|
||||||
|
.. autoclass:: Packets.Messages.Protocols.cryptography.Handshake.Handshake
|
||||||
|
:members:
|
||||||
|
:undoc-members:
|
|
@ -3,6 +3,10 @@ import Packets.Message
|
||||||
|
|
||||||
|
|
||||||
class HopperRequest(Message):
|
class HopperRequest(Message):
|
||||||
|
"""
|
||||||
|
Proxy request to main internet from remote node
|
||||||
|
"""
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
sender,
|
sender,
|
||||||
|
@ -10,11 +14,42 @@ class HopperRequest(Message):
|
||||||
sourceNode,
|
sourceNode,
|
||||||
recipient,
|
recipient,
|
||||||
recipientNode,
|
recipientNode,
|
||||||
url,
|
url: str,
|
||||||
params,
|
params: dict,
|
||||||
method,
|
method: str,
|
||||||
cryptographyInfo,
|
cryptographyInfo,
|
||||||
):
|
):
|
||||||
|
"""
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
sender
|
||||||
|
6 digit (maximum) node or peer ID
|
||||||
|
|
||||||
|
senderID
|
||||||
|
3 digit (maximum) ID for mapping display names to a given user
|
||||||
|
|
||||||
|
sourceNode: int
|
||||||
|
Source of request
|
||||||
|
|
||||||
|
recipient
|
||||||
|
6 digit (maximum) recipient peer ID
|
||||||
|
|
||||||
|
recipientNode
|
||||||
|
6 digit (maximum) recipient node ID
|
||||||
|
|
||||||
|
url: str
|
||||||
|
URL to parse
|
||||||
|
|
||||||
|
params: dict
|
||||||
|
Parameters to add to the request for the URL
|
||||||
|
|
||||||
|
method: str
|
||||||
|
Method to use for request (GET/POST currently)
|
||||||
|
|
||||||
|
cryptographyInfo: Cryptography.WhaleSong.Transport
|
||||||
|
Cryptography instance for encrypting message
|
||||||
|
|
||||||
|
"""
|
||||||
bytesOb = Packets.Message.dict2bytes({"url": url, "parameters": params, "method": method, "recipient": sender, "recipientNode": sourceNode})
|
bytesOb = Packets.Message.dict2bytes({"url": url, "parameters": params, "method": method, "recipient": sender, "recipientNode": sourceNode})
|
||||||
super().__init__(
|
super().__init__(
|
||||||
bytesOb,
|
bytesOb,
|
||||||
|
|
|
@ -0,0 +1,6 @@
|
||||||
|
hopper.Request
|
||||||
|
==============
|
||||||
|
|
||||||
|
.. autoclass:: Packets.Messages.Protocols.hopper.Request.HopperRequest
|
||||||
|
:members:
|
||||||
|
:undoc-members:
|
|
@ -3,12 +3,38 @@ import Packets.Message
|
||||||
|
|
||||||
|
|
||||||
class HopperResponse(Message):
|
class HopperResponse(Message):
|
||||||
|
"""
|
||||||
|
Send proxied request back to requester
|
||||||
|
"""
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self, sender, senderID, sourceNode, recipient, recipientNode, response, cryptographyInfo
|
self, sender, senderID, sourceNode, recipient, recipientNode, response, cryptographyInfo
|
||||||
):
|
):
|
||||||
bytesOb = Packets.Message.dict2bytes({"res": response, "recipient": recipient, "target": "hopper"})
|
"""
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
sender
|
||||||
|
6 digit (maximum) node or peer ID
|
||||||
|
|
||||||
# bytesOb = cryptographyInfo.encrypt(bytesOb, recipientNode)
|
senderID
|
||||||
|
3 digit (maximum) ID for mapping display names to a given user
|
||||||
|
|
||||||
|
sourceNode: int
|
||||||
|
Source of request
|
||||||
|
|
||||||
|
recipient
|
||||||
|
6 digit (maximum) recipient peer ID
|
||||||
|
|
||||||
|
recipientNode
|
||||||
|
6 digit (maximum) recipient node ID
|
||||||
|
|
||||||
|
response
|
||||||
|
Data from proxied request
|
||||||
|
|
||||||
|
cryptographyInfo: Cryptography.WhaleSong.Transport
|
||||||
|
Cryptography instance for encrypting message
|
||||||
|
"""
|
||||||
|
bytesOb = Packets.Message.dict2bytes({"res": response, "recipient": recipient, "target": "hopper"})
|
||||||
|
|
||||||
super().__init__(
|
super().__init__(
|
||||||
bytesOb,
|
bytesOb,
|
||||||
|
|
|
@ -0,0 +1,6 @@
|
||||||
|
hopper.Response
|
||||||
|
===============
|
||||||
|
|
||||||
|
.. autoclass:: Packets.Messages.Protocols.hopper.Response.HopperResponse
|
||||||
|
:members:
|
||||||
|
:undoc-members:
|
|
@ -5,9 +5,12 @@ import logging
|
||||||
|
|
||||||
logger = logging.getLogger("__main__." + __name__)
|
logger = logging.getLogger("__main__." + __name__)
|
||||||
|
|
||||||
# TODO: Add public key
|
|
||||||
|
|
||||||
class AnnounceMessage(Message):
|
class AnnounceMessage(Message):
|
||||||
|
"""
|
||||||
|
Announce the network map details and public key of the node for discovery
|
||||||
|
"""
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
sender,
|
sender,
|
||||||
|
@ -16,12 +19,30 @@ class AnnounceMessage(Message):
|
||||||
cryptographyInfo,
|
cryptographyInfo,
|
||||||
mapping,
|
mapping,
|
||||||
):
|
):
|
||||||
|
"""
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
sender
|
||||||
|
6 digit (maximum) node or peer ID
|
||||||
|
|
||||||
|
senderID
|
||||||
|
3 digit (maximum) ID for mapping display names to a given user
|
||||||
|
|
||||||
|
sourceNode: int
|
||||||
|
Source of request
|
||||||
|
|
||||||
|
cryptographyInfo: Cryptography.WhaleSong.Transport
|
||||||
|
Cryptography instance for encrypting message
|
||||||
|
|
||||||
|
mapping: dict
|
||||||
|
Network map
|
||||||
|
"""
|
||||||
mapping["publicKey"] = cryptographyInfo.getRecord("key", "self")["publicKey"]
|
mapping["publicKey"] = cryptographyInfo.getRecord("key", "self")["publicKey"]
|
||||||
recipient = -1
|
recipient = -1
|
||||||
recipientNode = -1
|
recipientNode = -1
|
||||||
bytesOb = Packets.Message.dict2bytes(mapping)
|
bytesOb = Packets.Message.dict2bytes(mapping)
|
||||||
logger.log(10, "Mapping bytes")
|
# logger.debug("Mapping bytes")
|
||||||
logger.log(10, bytesOb)
|
# logger.debug(bytesOb)
|
||||||
super().__init__(
|
super().__init__(
|
||||||
bytesOb,
|
bytesOb,
|
||||||
sender,
|
sender,
|
||||||
|
|
|
@ -0,0 +1,6 @@
|
||||||
|
map.Announce
|
||||||
|
============
|
||||||
|
|
||||||
|
.. autoclass:: Packets.Messages.Protocols.map.Announce.AnnounceMessage
|
||||||
|
:members:
|
||||||
|
:undoc-members:
|
|
@ -34,7 +34,7 @@ class Packet:
|
||||||
packetsClass: int = -1,
|
packetsClass: int = -1,
|
||||||
primaryMessage=None,
|
primaryMessage=None,
|
||||||
):
|
):
|
||||||
if packetsID == False:
|
if not packetsID:
|
||||||
self.packetsID, self.packetNumber, self.data, self.packetsClass = (
|
self.packetsID, self.packetNumber, self.data, self.packetsClass = (
|
||||||
self.parsePayload(data)
|
self.parsePayload(data)
|
||||||
)
|
)
|
||||||
|
@ -69,8 +69,7 @@ class Packet:
|
||||||
}
|
}
|
||||||
if res["data"] == "":
|
if res["data"] == "":
|
||||||
res.pop("data")
|
res.pop("data")
|
||||||
if self.primaryMessage != None:
|
if self.primaryMessage is not None:
|
||||||
res["primaryMessage"] = self.primaryMessage
|
res["primaryMessage"] = self.primaryMessage
|
||||||
ores = msgpack.dumps(res)
|
ores = msgpack.dumps(res)
|
||||||
# logging.log(20, "Packet size: " + str(sys.getsizeof(ores)))
|
|
||||||
return ores
|
return ores
|
||||||
|
|
|
@ -2,9 +2,6 @@ from .Packet import Packet
|
||||||
import msgpack
|
import msgpack
|
||||||
import lzma
|
import lzma
|
||||||
|
|
||||||
# TODO: Instantiation
|
|
||||||
# TODO: Packet template loading
|
|
||||||
|
|
||||||
|
|
||||||
class SinglePacket(Packet):
|
class SinglePacket(Packet):
|
||||||
"""
|
"""
|
||||||
|
|
|
@ -5,7 +5,12 @@ import logging
|
||||||
|
|
||||||
logger = logging.getLogger("__main__." + __name__)
|
logger = logging.getLogger("__main__." + __name__)
|
||||||
|
|
||||||
|
|
||||||
class SubMessage(Message):
|
class SubMessage(Message):
|
||||||
|
"""
|
||||||
|
SubMessage to a primary message, enables us to send more/dynamic data
|
||||||
|
"""
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
sender,
|
sender,
|
||||||
|
@ -20,9 +25,54 @@ class SubMessage(Message):
|
||||||
target=True,
|
target=True,
|
||||||
primaryMessage=None
|
primaryMessage=None
|
||||||
):
|
):
|
||||||
|
"""
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
bytesObject: bytes
|
||||||
|
Bytes to split into packets
|
||||||
|
|
||||||
|
sender: int
|
||||||
|
6 digit (maximum) node or peer ID
|
||||||
|
|
||||||
|
senderDisplayName: int
|
||||||
|
3 digit (maximum) ID for mapping display names to a given user
|
||||||
|
|
||||||
|
sourceNode: int
|
||||||
|
Source of request
|
||||||
|
|
||||||
|
recipient: int
|
||||||
|
6 digit (maximum) node or peer ID
|
||||||
|
|
||||||
|
recipientNode: int
|
||||||
|
6 digit (maximum) node ID to route the packet to
|
||||||
|
|
||||||
|
cryptographyInfo: Cryptography.WhaleSong.Transport
|
||||||
|
Cryptography instance for encrypting message
|
||||||
|
|
||||||
|
packetsClass: int
|
||||||
|
Which protocol the packets are using
|
||||||
|
|
||||||
|
pAction: int
|
||||||
|
3 digit (maximum) pAction ID for mapping precise actions within a protocol
|
||||||
|
|
||||||
|
dataSize: int
|
||||||
|
Size to cut the bytesObject into per packet
|
||||||
|
|
||||||
|
wantFullResponse: bool
|
||||||
|
Whether to send a response when the message has completed reception (TODO: Kill all retries for associated packets when received)
|
||||||
|
|
||||||
|
target
|
||||||
|
Whether the message is being sent to a target, if so, where
|
||||||
|
|
||||||
|
subMessage: bool
|
||||||
|
Whether this is a submessage
|
||||||
|
|
||||||
|
primaryMessage
|
||||||
|
Primary message this is a submessage to, if this is a submessage
|
||||||
|
"""
|
||||||
bytesOb = Packets.Message.dict2bytes(data)
|
bytesOb = Packets.Message.dict2bytes(data)
|
||||||
logger.log(10, "Submessage bytes")
|
# logger.debug("Submessage bytes")
|
||||||
logger.log(10, bytesOb)
|
# logger.debug(bytesOb)
|
||||||
super().__init__(
|
super().__init__(
|
||||||
bytesOb,
|
bytesOb,
|
||||||
sender,
|
sender,
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
SubPacket: Packets for submessages
|
SubPacket: Packets for submessages
|
||||||
==================================
|
==================================
|
||||||
|
|
||||||
.. autoclass:: Packets.SubPacket.SubPacket
|
.. autoclass:: Packets.SubMessage.SubMessage
|
||||||
:members:
|
:members:
|
||||||
|
|
|
@ -1,4 +1,35 @@
|
||||||
class Action:
|
class Action:
|
||||||
|
"""
|
||||||
|
Generic action class for triggering actions from subprocesses on the main thread
|
||||||
|
|
||||||
|
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Services/Action.py>`_
|
||||||
|
|
||||||
|
Attributes
|
||||||
|
----------
|
||||||
|
|
||||||
|
action: str
|
||||||
|
Action to run
|
||||||
|
|
||||||
|
data: dict
|
||||||
|
Data to pass to action
|
||||||
|
|
||||||
|
sender: str
|
||||||
|
Sender identifier
|
||||||
|
|
||||||
|
senderID: str
|
||||||
|
Sender second level identifier
|
||||||
|
|
||||||
|
sourceNode: str
|
||||||
|
Sending node
|
||||||
|
|
||||||
|
recipient: str
|
||||||
|
Peer identifier to route to
|
||||||
|
|
||||||
|
recipientNode: str
|
||||||
|
Intended destination node identifier
|
||||||
|
"""
|
||||||
|
# TODO: Utilize the attributes here to skip over doing it manually
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
action,
|
action,
|
||||||
|
@ -11,19 +42,19 @@ class Action:
|
||||||
):
|
):
|
||||||
self.action = action
|
self.action = action
|
||||||
self.data = data
|
self.data = data
|
||||||
if sender != None:
|
if sender is not None:
|
||||||
self.data["sender"] = sender
|
self.data["sender"] = sender
|
||||||
|
|
||||||
if senderID != None:
|
if senderID is not None:
|
||||||
self.data["senderID"] = senderID
|
self.data["senderID"] = senderID
|
||||||
|
|
||||||
if sourceNode != None:
|
if sourceNode is not None:
|
||||||
self.data["sourceNode"] = sourceNode
|
self.data["sourceNode"] = sourceNode
|
||||||
|
|
||||||
if recipient != None:
|
if recipient is not None:
|
||||||
self.data["recipient"] = recipient
|
self.data["recipient"] = recipient
|
||||||
|
|
||||||
if recipientNode != None:
|
if recipientNode is not None:
|
||||||
self.data["recipientNode"] = recipientNode
|
self.data["recipientNode"] = recipientNode
|
||||||
|
|
||||||
def getAction(self):
|
def getAction(self):
|
||||||
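A small usage sketch of Action; the keyword defaults are not shown in this diff, so the call below assumes they default to None as the `is not None` checks in __init__ suggest. Values are illustrative.

from Services.Action import Action

a = Action(
    "sendCatch",               # illustrative action name
    {"html": "<h1>hi</h1>"},   # payload; the routing fields below get merged into it
    sender="000001",
    recipientNode="000042",
)
# a.data now also carries "sender" and "recipientNode" per __init__ above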
|
|
|
@ -0,0 +1,6 @@
|
||||||
|
Services.Action
|
||||||
|
===============
|
||||||
|
|
||||||
|
.. autoclass:: Services.Action.Action
|
||||||
|
:members:
|
||||||
|
:undoc-members:
|
|
@ -1,10 +1,11 @@
|
||||||
from uuid import uuid4
|
# NOTE: Local imports
|
||||||
import Components.hopper as hopper
|
import Components.hopper as hopper
|
||||||
from Packets.Messages.Protocols.catch.Request import CatchRequest
|
from Packets.Messages.Protocols.catch.Request import CatchRequest
|
||||||
from Packets.Messages.Protocols.catch.IndexSync import IndexSync
|
from Packets.Messages.Protocols.catch.IndexSync import IndexSync
|
||||||
from Packets.Messages.Protocols.hopper.Request import HopperRequest
|
from Packets.Messages.Protocols.hopper.Request import HopperRequest
|
||||||
from Packets.Messages.Protocols.bubble.Bubble import Bubble
|
from Packets.Messages.Protocols.bubble.Bubble import Bubble
|
||||||
|
|
||||||
|
# NOTE: Server imports
|
||||||
from microdot import Microdot
|
from microdot import Microdot
|
||||||
from microdot import send_file
|
from microdot import send_file
|
||||||
from microdot.websocket import with_websocket, WebSocketError
|
from microdot.websocket import with_websocket, WebSocketError
|
||||||
|
@ -12,6 +13,7 @@ from microdot import Request
|
||||||
from microdot.jinja import Template
|
from microdot.jinja import Template
|
||||||
from microdot.session import Session, with_session
|
from microdot.session import Session, with_session
|
||||||
|
|
||||||
|
# NOTE: Generic imports
|
||||||
import random
|
import random
|
||||||
import json
|
import json
|
||||||
import time
|
import time
|
||||||
|
@ -19,13 +21,11 @@ import logging
|
||||||
import traceback
|
import traceback
|
||||||
import uuid
|
import uuid
|
||||||
import re
|
import re
|
||||||
|
from uuid import uuid4
|
||||||
import msgpack
|
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger("__main__." + __name__)
|
logger = logging.getLogger("__main__." + __name__)
|
||||||
|
|
||||||
# Enable 500 kB files in the webui
|
# Enable 0.5 MB files in the webui
|
||||||
Request.max_content_length = 1024 * 1024 * 0.5
|
Request.max_content_length = 1024 * 1024 * 0.5
|
||||||
Request.max_body_length = 1024 * 1024 * 0.5
|
Request.max_body_length = 1024 * 1024 * 0.5
|
||||||
Request.max_readline = 1024 * 1024
|
Request.max_readline = 1024 * 1024
|
||||||
|
@ -39,11 +39,8 @@ class Server:
|
||||||
|
|
||||||
Attributes
|
Attributes
|
||||||
----------
|
----------
|
||||||
cLog
|
transceiver: Transceiver.Transceiver.Transceiver
|
||||||
Reference to `run.Node.cLog` for logging
|
Reference to our `Transceiver.Transceiver.Transceiver` instance
|
||||||
|
|
||||||
transmitter: Transmission.transmission.Transmitter
|
|
||||||
Reference to our `Transmission.transmission.Transmitter` instance
|
|
||||||
|
|
||||||
network: Siph.Network.Network
|
network: Siph.Network.Network
|
||||||
Reference to our `Siph.Network.Network`
|
Reference to our `Siph.Network.Network`
|
||||||
|
@ -71,6 +68,30 @@ class Server:
|
||||||
remoteCatchIndex,
|
remoteCatchIndex,
|
||||||
cache,
|
cache,
|
||||||
):
|
):
|
||||||
|
"""
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
transceiver: Transceiver.Transceiver.Transceiver
|
||||||
|
Reference to our `Transceiver.Transceiver.Transceiver` instance
|
||||||
|
|
||||||
|
catch: Daisy.Catch.Catch
|
||||||
|
Reference to our Catch Cache instance to pull from for serving Catches
|
||||||
|
|
||||||
|
nodeID: str
|
||||||
|
String converted PierMesh node ID
|
||||||
|
|
||||||
|
network: Siph.Network.Network
|
||||||
|
Reference to our `Siph.Network.Network`
|
||||||
|
|
||||||
|
cryptographyInfo: Cryptography.WhaleSong.Transport
|
||||||
|
Cryptography instance for encrypting message
|
||||||
|
|
||||||
|
remoteCatchIndex: Daisy.Index.Index
|
||||||
|
Reference to our index of remote Catches to pull from
|
||||||
|
|
||||||
|
cache: Daisy.Cache.Cache
|
||||||
|
Reference to our on disk files
|
||||||
|
"""
|
||||||
self.transceiver = transceiver
|
self.transceiver = transceiver
|
||||||
self.network = network
|
self.network = network
|
||||||
self.network.syncaddLookup(onodeID, self.transceiver.interface.localNode.nodeNum)
|
self.network.syncaddLookup(onodeID, self.transceiver.interface.localNode.nodeNum)
|
||||||
|
@ -92,7 +113,7 @@ class Server:
|
||||||
Static resources endpoint
|
Static resources endpoint
|
||||||
"""
|
"""
|
||||||
if ".." in path:
|
if ".." in path:
|
||||||
# directory traversal is not allowed
|
# NOTE: Directory traversal is not allowed
|
||||||
return "Not found", 404
|
return "Not found", 404
|
||||||
return send_file("Splash/build/res/" + path, max_age=86400)
|
return send_file("Splash/build/res/" + path, max_age=86400)
|
||||||
|
|
||||||
|
@ -103,42 +124,49 @@ class Server:
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
return send_file("Splash/build/index/index.html")
|
return send_file("Splash/build/index/index.html")
|
||||||
except Exception as e:
|
except Exception:
|
||||||
logger.error(traceback.format_exc())
|
logger.error(traceback.format_exc())
|
||||||
return "Server error", 500
|
return "Server error", 500
|
||||||
|
|
||||||
@self.app.route("/hop/<tmpResourceID>")
|
@self.app.route("/hop/<tmpResourceID>")
|
||||||
async def hop(request, tmpResourceID):
|
async def hop(request, tmpResourceID):
|
||||||
|
"""
|
||||||
|
Static handler to serve files from Hopper requests temporarily
|
||||||
|
"""
|
||||||
try:
|
try:
|
||||||
return self.cache.get("tmp/hopper/" + tmpResourceID).get()["html"]
|
return self.cache.get("tmp/hopper/" + tmpResourceID).get()["html"]
|
||||||
except Exception as e:
|
except Exception:
|
||||||
logger.error(traceback.format_exc())
|
logger.error(traceback.format_exc())
|
||||||
return "Server error", 500
|
return "Server error", 500
|
||||||
|
|
||||||
@self.app.route("/api/json")
|
@self.app.route("/api/json")
|
||||||
async def api(request):
|
async def api(request):
|
||||||
|
"""
|
||||||
|
Currently just a test JSON API endpoint that returns {"hello": "world"}
|
||||||
|
"""
|
||||||
try:
|
try:
|
||||||
return {"hello": "world"}
|
return {"hello": "world"}
|
||||||
except Exception as e:
|
except Exception:
|
||||||
logger.error(traceback.format_exc())
|
logger.error(traceback.format_exc())
|
||||||
return "Server error", 500
|
return "Server error", 500
|
||||||
|
|
||||||
@self.app.route("/admin")
|
@self.app.route("/admin")
|
||||||
@with_session
|
@with_session
|
||||||
async def admin(request):
|
async def admin(request):
|
||||||
|
"""
|
||||||
|
Static endpoint for the Admin interface; currently it only displays our PSKs
|
||||||
|
"""
|
||||||
try:
|
try:
|
||||||
return Template("Splash/admin/admin.html").render(psks=self.getPSKs())
|
return Template("Splash/admin/admin.html").render(psks=self.getPSKs())
|
||||||
except Exception as e:
|
except Exception:
|
||||||
logger.error(traceback.format_exc())
|
logger.error(traceback.format_exc())
|
||||||
return "Server error", 500
|
return "Server error", 500
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@self.app.route("/bubble")
|
@self.app.route("/bubble")
|
||||||
@with_websocket
|
@with_websocket
|
||||||
async def bubble(request, ws):
|
async def bubble(request, ws):
|
||||||
"""
|
"""
|
||||||
Websocket handler that bridges HTMX to our transmitter
|
Websocket handler that bridges HTMX to our transmitter for client side PierMesh operations
|
||||||
|
|
||||||
Notes
|
Notes
|
||||||
-----
|
-----
|
||||||
|
@@ -148,7 +176,7 @@ class Server:
try:
r = await ws.receive()
message = json.loads(r)
- #logger.debug(json.dumps(message, indent=4))
+ # logger.debug(json.dumps(message, indent=4))
trigger = message["HEADERS"]["HX-Trigger"]
logger.debug(f"Trigger: {trigger}")
# TODO: Drop old id from cache on regen
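The handler above assumes only that each WebSocket frame is JSON and carries a `HEADERS` object with an `HX-Trigger` key, which matches how the htmx WebSocket extension annotates ws-send payloads. A hedged sketch of that parsing step; every field other than `HEADERS`/`HX-Trigger` is illustrative:

```python
import json

# Roughly what a decoded htmx ws-send frame looks like; extra keys vary by element.
raw = json.dumps(
    {
        "catchQuery": "bubble",
        "HEADERS": {
            "HX-Trigger": "catchQuery",
            "HX-Target": "catchDisplay",
        },
    }
)

message = json.loads(raw)
trigger = message["HEADERS"]["HX-Trigger"]  # the same lookup bubble() performs
print(trigger)  # -> catchQuery
```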
@@ -216,7 +244,7 @@ class Server:
message["body"],
fins=message["finsStr"].split(","),
)
- if res == False:
+ if not res:
await ws.send(
'<div id="catchDisplay">{0}</div>'.format(
"Searching PierMesh for Catch please wait...<img src='/res/img/searching.gif'>"
@@ -235,7 +263,7 @@ class Server:
fins = message["fins"]
# TODO: Handling multiple results
q = self.remoteCatchIndex.search(q)[0]
- if q != False:
+ if q:
m = CatchRequest(
peerID,
000000,
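The two simplifications above, `if res == False:` → `if not res:` and `if q != False:` → `if q:`, are only drop-in equivalents when the tested value is either `False` or something truthy. Whether other falsy values can actually reach these branches is not visible in this hunk, but the difference is easy to check:

```python
for value in (False, 0, None, "", [], "hit"):
    old_style = value != False  # mirrors the removed comparisons
    new_style = bool(value)     # what `if q:` / `if not res:` now test
    print(repr(value), old_style, new_style)

# Identical for False, 0 (since 0 == False) and truthy values like "hit";
# they diverge for None, "" and [] -- the old test treated those as a hit,
# the new test treats them as "nothing found".
```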
@@ -347,24 +375,26 @@ class Server:
await ws.send(
"""<div id="chat_room" hx-swap-oob="beforeend">hi</div>"""
)
- except WebSocketError as e:
+ except WebSocketError:
pass

# Uncomment below for WebSocket debugging
logger.debug(traceback.format_exc())
return "Server error", 500
- except Exception as e:
+ except Exception:
logger.error(traceback.format_exc())
return "Server error", 500

async def getPSKs(self):
+ """
+ Get all PSKs for display
+ """
psks = [
{"psk": v["PSK"], "nodeID": k}
for k, v in self.cryptographyInfo["msg"].items()
]
return psks

- # TODO: Send catch to catch display
async def sendToPeer(self, peerID: str, data: str, target: str):
"""
Send data to Websocket of peer with peerID
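The comprehension in `getPSKs` pins down the shape it expects from `self.cryptographyInfo["msg"]`: a mapping of node IDs to records that contain at least a `"PSK"` entry. A minimal stand-in with made-up values (the real store is the WhaleSong/Daisy record, not this literal dict):

```python
# Fake data shaped like cryptographyInfo["msg"]; keys and PSK strings are invented.
msg_records = {
    "nodeA": {"PSK": "example-psk-1"},
    "nodeB": {"PSK": "example-psk-2"},
}

psks = [{"psk": v["PSK"], "nodeID": k} for k, v in msg_records.items()]
print(psks)
# [{'psk': 'example-psk-1', 'nodeID': 'nodeA'}, {'psk': 'example-psk-2', 'nodeID': 'nodeB'}]
```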
@@ -1,5 +1,5 @@
serve: Web UI server
- ============================
+ ====================

.. autoclass:: Splash.serve.Server
:members:
@@ -8,6 +8,10 @@ logger = logging.getLogger("__main__." + __name__)


class YCTX(Context):
+ """
+ Context data structure for message parsing
+ """
+
def __init__(
self,
packetsID,
@@ -21,6 +25,37 @@ class YCTX(Context):
submessagesIDs=[],
eData=None,
):
+ """
+ packetsID: int
+     Identifier for message
+
+ packetCount: int
+     Number of packets in message
+
+ pAction: str
+     Action to execute after parsing
+
+ todo
+     Queue of actions to execute in the main loop
+
+ cryptographyInfo: Cryptography.WhaleSong.Transport
+     Cryptography instance for encrypting message
+
+ sourceNode: int
+     Source of request
+
+ subMessage: bool
+     Whether this is a submessage
+
+ subMessages: dict
+     Dict of parsed submessages of primary message
+
+ submessagesIDs: list
+     List of required submessages to complete full message
+
+ eData
+     Any extra data
+ """
super().__init__(
sourceNode=sourceNode,
packetsID=packetsID,
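The docstring added above enumerates what a YCTX carries. Purely as orientation, here is a hedged sketch of constructing one with those fields passed as keyword arguments; the exact signature, argument order and defaults are not visible in this hunk, so treat every argument (and the `transport` object) as an assumption:

```python
from Sponge.Protocols.Yellow import YCTX

# transport: a Cryptography.WhaleSong.Transport built during node startup (assumed).
yctx = YCTX(
    packetsID=123456,            # identifier for the message
    packetCount=4,               # number of packets in the message
    pAction="examplePAction",    # action to run after parsing (illustrative name)
    todo=[],                     # queue of actions for the main loop (container type assumed)
    cryptographyInfo=transport,  # Cryptography.WhaleSong.Transport instance
    sourceNode=654321,           # source of the request
    subMessage=False,            # this is a primary message, not a submessage
    subMessages={},              # parsed submessages so far
    submessagesIDs=[],           # required submessages still outstanding
    eData=None,                  # any extra data
)
```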
@@ -36,13 +71,56 @@ class YCTX(Context):


class Yellow:
- # TODO: Submessage completion actions
+ """
+ Message parser that's subclassed to easily make parsers for specific protocols
+
+ Attributes
+ ----------
+ yctx: YCTX
+     Message parsing context
+
+ message
+
+ submessages: dict
+     Dictionary of submessages
+
+ submessagesIDs: list
+     List of required submessages
+
+ finishedSubmessages: dict
+     Dictionary of finished submessages
+
+ dataOrder: list
+     List that maps packets based on their received order
+
+ data: list
+     Data of primary message
+
+ nonce
+     Cryptography artifact for decrypting message
+
+ tag
+     Cryptography artifact for decrypting message
+
+ gotHead: bool
+     Whether we've gotten the head/header packet
+
+ todo
+     Queue of actions to execute in the main loop
+ """
pActions = []

def __init__(
self,
yctx: YCTX,
):
+ """
+ Parameters
+ ----------
+ yctx: YCTX
+     Message parsing context
+ """
self.yctx = yctx
self.message = None
self.submessages = yctx["subMessages"]["val"]
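The new docstring describes Yellow as a parser meant to be subclassed per protocol, and the later hunks in this commit (catch.py, hopper.py) do exactly that by overriding only `pActions`. A minimal sketch of a hypothetical filter following the same pattern; the class name and action string are invented:

```python
from Sponge.Protocols.Yellow import Yellow


class Echo(Yellow):
    """
    Hypothetical protocol filter: parsing and reassembly are inherited from
    Yellow; only the actions queued for the main loop are declared here,
    mirroring catch.py's ["sendCatch", "routeCatch", "syncIndex"] and
    hopper.py's ["hop", "routeHop"].
    """

    pActions = ["echoBack"]
```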
@@ -54,7 +132,7 @@ class Yellow:
self.tag = None
self.gotHead = False
self.todo = yctx.todo
- if yctx["eData"]["val"] != None:
+ if yctx["eData"]["val"] is not None:
self.dataOrder = yctx["eData"]["val"]["dataOrder"]
self.data = yctx["eData"]["val"]["data"]
@@ -0,0 +1,10 @@
+ Sponge.Protocols.Yellow
+ =======================
+
+ .. autoclass:: Sponge.Protocols.Yellow.YCTX
+     :members:
+     :undoc-members:
+
+ .. autoclass:: Sponge.Protocols.Yellow.Yellow
+     :members:
+     :undoc-members:
@@ -3,6 +3,7 @@ from Sponge.Protocols.Yellow import Yellow
# TODO: Forwarding message to next node
# TODO: Method to get next node in path to recipient node
+


class Bubble(Yellow):
"""
Peer to peer protol
@@ -0,0 +1,6 @@
+ Sponge.Protocols.bubble
+ =======================
+
+ .. autoclass:: Sponge.Protocols.bubble.Bubble
+     :members:
+     :undoc-members:
@@ -7,5 +7,5 @@ class Catch(Yellow):

`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/catch.py>`__
"""

pActions = ["sendCatch", "routeCatch", "syncIndex"]
@@ -0,0 +1,6 @@
+ Sponge.Protocols.catch
+ =======================
+
+ .. autoclass:: Sponge.Protocols.catch.Catch
+     :members:
+     :undoc-members:
@@ -0,0 +1,6 @@
+ Sponge.Protocols.cryptography
+ =============================
+
+ .. autoclass:: Sponge.Protocols.cryptography.CryptographyFilter
+     :members:
+     :undoc-members:
@@ -1,5 +1,6 @@
from Sponge.Protocols.Yellow import Yellow

+ # NOTE: Placeholder

class Daisy(Yellow):
"""
@@ -8,4 +8,3 @@ class Hopper(Yellow):
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/hopper.py>`__
"""
pActions = ["hop", "routeHop"]
Some files were not shown because too many files have changed in this diff.