Compare commits: 935cdc4952 ... 46997b1f95

4 commits:

- 46997b1f95
- bda89ca795
- 46a9cc3ee9
- 66f3cbe401
@@ -8,3 +8,15 @@ src/Splash/res/js/node_modules/
 src/daisy/
 src/catch/
 src/logs/
+zims/
+project.matrix
+piermesh.nvim
+piermesh
+node00/*
+node00/
+*.index
+*.log
+*.vim
+*.workspace
+.workspace
+.workspace.backup
@@ -6,13 +6,18 @@

 <a id="module-Components.hopper"></a>

-### Components.hopper.get(url: str, params=None)
+### Components.hopper.downloadFile(url, text=True, mimeType=None)
+
+Download resource from url and convert it to text or a data url
+
+### Components.hopper.get(url: str, params=None, followTags=None)

 http/s get request

 * **Parameters:**
   * **url** (*str*)
   * **params** – Requests (library) parameters
+  * **followTags** – None or list of tags to download the src/href from

 ### Components.hopper.post(url: str, params=None)
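For orientation, a brief usage sketch of the revised hopper API, based only on the signatures above; the URLs and the handling of return values are illustrative assumptions, not taken from the diff.

```python
# Sketch of the documented Components.hopper API; URLs are placeholders and
# the return types are assumptions (the docs above do not specify them).
from Components import hopper

# Plain GET, with query parameters passed through to the requests library
page = hopper.get("https://example.org", params={"q": "piermesh"})

# GET that also follows the src/href of the listed tags
page_with_assets = hopper.get("https://example.org", followTags=["img", "script"])

# Fetch one resource and convert it to text (or a data url for binary types)
stylesheet = hopper.downloadFile("https://example.org/style.css", text=True)
```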
@@ -0,0 +1,16 @@
+<a id="context"></a>
+
+# Context
+
+### *class* Config.Context.Context(subsets: dict = {}, \*\*kwargs)
+
+Generic context data structure, currently subclassed for use in filters; see Sponge/Protocols/Yellow.py
+
+[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Config/Context.py)
+
+#### ctx
+
+Dictionary of context values
+
+* **Type:**
+  dict
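A small, hypothetical sketch of how a Context might be constructed, assuming only the constructor shown above; whether keyword arguments end up in the `ctx` dictionary is an assumption.

```python
# Hypothetical sketch; field names are placeholders.
from Config.Context import Context

ctx = Context(
    subsets={"header": ["packetsID", "packetCount"]},
    sender=1,
    senderID=10,
)

# Assumption: constructor values are exposed through the ctx dictionary
print(ctx.ctx)
```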
@@ -1,22 +1,11 @@
-<a id="whalesong-diffie-hellman-ephemeral-fernet-based-encryption"></a>
+<a id="whalesong"></a>

-# WhaleSong: Diffie hellman ephemeral Fernet based encryption
+# WhaleSong

-### *class* Cryptography.WhaleSong.DHEFern(cache, nodeNickname, cLog)
+### *class* Cryptography.WhaleSong.Transport(cache, nodeNickname, daisyCryptography, psk)

 [🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Cryptography/WhaleSong.py)

-#### cLog
-
-Method reference to run.Node.cLog so we can log to the ui from here
-
-#### loadedParams
-
-In memory representations of cryptography parameters
-
-* **Type:**
-  dict
-
 #### loadedKeys

 In memory representations of cryptography keys
@@ -36,7 +25,7 @@ Name of node for isolating configs when running multiple nodes
 Daisy cache for use in storing cryptography information

 * **Type:**
-  Components.daisy.Cache
+  [Daisy.Cache.Cache](/PierMesh/piermesh/src/branch/main/docs/Daisy/Cache.md#Daisy.Cache.Cache)

 #### publicKey

@@ -46,20 +35,52 @@ Public key for node
 Private key for node

-#### checkInMem(store: str, nodeID: str)
+#### daisyCryptography

-Check if parameters or keys are loaded for node of nodeID
+Record cryptography reference

-* **Parameters:**
-  **store** (*str*) – Whether to check loaded keys or parameters
+* **Type:**
+  [Daisy.CryptographyUtil.SteelPetal](/PierMesh/piermesh/src/branch/main/docs/Daisy/CryptographyUtil.md#Daisy.CryptographyUtil.SteelPetal)

-#### decrypt(data, nodeID: str)
+#### addPeerEphemeralKey(onodeID, peerEphemeralKey: bytes)

-Decrypt bytes and return either str or dict (TODO: Check whether to msgpack load)
+Add a peer node’s ephemeral key for session encryption

-#### encrypt(data, nodeID: str, isDict: bool = True)
+onodeID
+: Node identifier

-Do Fernet encryption
+peerEphemeralKey: bytes
+: Serialized ephemeral key
+
+#### addPublickey(onodeID, publicKey, forSelf: bool = False)
+
+Add a public key for a given node including this one
+
+onodeID
+: Node identifier
+
+publicKey
+: Public key to add
+
+forSelf: bool
+: Whether to add key for this node
+
+#### decrypt(data, onodeID: str, nonce, tag)
+
+Decrypt bytes and return either str or dict depending on result
+
+onodeID: str
+: Node identifier
+
+nonce
+: Encryption nonce
+
+tag
+: Encryption tag
+
+#### encrypt(data, nodeID: str, isDict: bool = True, pskEncrypt=False)
+
+Encrypt given data with AES GCM

 data
 : Either bytes or dict to encrypt

@@ -67,53 +88,42 @@ data
 isDict: bool
 : Whether data is a dictionary

-#### genKeyPair(paramsOverride=False, setSelf: bool = True)
+pskEncrypt: bool
+: Whether to encrypt with pre-shared key

-Generate public and private keys from self.params (TODO: Gen from passed params)
+#### genOurEphemeralKey(onodeID)

-paramsOverride
-: False or parameters to use (TODO)
+Generate ephemeral key for session encryption with given node

-setSelf: bool
-: Whether to set self.privateKey and self.publicKey
+#### genStaticKey(onodeID)

-#### genParams()
+Generate static key for session encryption with given node

-Generate Diffie Hellman parameters
+#### generateSessionKey(onodeID)

-#### getParamsBytes()
+Generate session key for transport encryption

-Get bytes encoded from self.parameters (TODO: Encode from store)
+onodeID
+: Node identifier

-#### getRecord(store: str, key: str)
+#### getRecord(store: str, key: str, ephemeral=False)

 Get record from store: store with key: key

-#### getSalt()
-
-Get random salt
-
 #### initStore(store: str)

 Initialize store: store

-#### keyDerive(pubKey: bytes, salt: bytes, nodeID: str, params: bytes)
+#### kdf(bytesX)

-Derive shared key using Diffie Hellman
+Key derivation function

-pubKey: bytes
-: Public key
+#### sessionSetup(onodeID, peerEphemeralKey: bytes)

-nodeID: str
-: PierMesh node ID
+Set up transport encryption session

-params: bytes
-: Encryption parameters
+onodeID
+: Node identifier

-#### loadParamBytes(pemBytes: bytes)
-
-Load parameters to self.params from given bytes (TODO: Load from store)
-
-#### loadRecordToMem(store: str, nodeID: str)
-
-Load record of nodeID from store to either keys or pameters
+peerEphemeralKey: bytes
+: Serialized ephemeral key
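The DHEFern/Fernet flow is replaced here by Transport, which exchanges ephemeral keys per peer and encrypts with AES-GCM. A minimal sketch of the session flow implied by the signatures above; the cache, daisyCryptography, psk and peer-key objects are placeholders, and the return shape of encrypt() is an assumption.

```python
# Sketch only: the method signatures come from the docs above, but every
# object below is a placeholder and the return shape of encrypt() is assumed.
from Cryptography.WhaleSong import Transport

cache = ...                # Daisy.Cache.Cache instance
daisy_crypto = ...         # Daisy.CryptographyUtil.SteelPetal instance
psk = ...                  # pre-shared key
peer, peer_ephemeral_key = "node01", b"..."

transport = Transport(cache, "node00", daisy_crypto, psk)

# Session setup: our ephemeral key, the peer's ephemeral key, then a session key
transport.genOurEphemeralKey(peer)
transport.addPeerEphemeralKey(peer, peer_ephemeral_key)
transport.sessionSetup(peer, peer_ephemeral_key)

# AES-GCM round trip; decrypt() needs the nonce and tag produced at encryption
ciphertext, nonce, tag = transport.encrypt({"hello": "pier"}, peer, isDict=True)
message = transport.decrypt(ciphertext, peer, nonce, tag)
```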
@@ -2,19 +2,20 @@

 # Daisy based cache

-### *class* Daisy.Cache.Cache(filepaths=None, cacheFile=None, path: str = 'daisy', walk: bool = False, isCatch: bool = False)
+### *class* Daisy.Cache.Cache(daisyCryptography, filepaths=None, cacheFile=None, path: str = 'daisy', walk: bool = False)

-In memory collection of Daisy records
+In memory collection of Daisy records, provides a search functionality currently utilized by Daisy.Catch.Catch

 [🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Daisy/Cache.py)

-#### create(path: str, data: dict)
+#### create(path: str, data: dict, remote=False)

 Create new record

 * **Parameters:**
   * **path** (*str*) – Path to create record at
   * **data** (*dict*) – Data to populate record with
+  * **remote** (*bool*) – Whether this is a reference to a distributed file (not implemented yet)

 #### get(path: str)

@@ -29,10 +30,10 @@ Reload from disk to memory

 #### search(keydict: dict, strict: bool = True)

-Search cache for record for records with values
+Search cache for records with keys and values matching those
+in the keydict

 keydict: dict
-: Values to search for

 strict: bool
-: Whether to require values match
+: Whether to require all keys/values match
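A short usage sketch of the revised Cache constructor and search(), based only on the signatures above; daisy_crypto and the record contents are placeholders.

```python
# Sketch based on the documented signatures; daisy_crypto and the record
# contents are placeholders.
from Daisy.Cache import Cache

daisy_crypto = ...  # Daisy.CryptographyUtil.SteelPetal instance

cache = Cache(daisy_crypto, path="daisy", walk=True)

# Create a record, then read it back by path
cache.create("peers/node01", {"nickname": "node01", "trusted": True})
record = cache.get("peers/node01")

# strict=True requires every key/value in the keydict to match
trusted = cache.search({"trusted": True}, strict=True)
```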
@@ -2,7 +2,7 @@

 # Daisy Catch cache

-### *class* Daisy.Catch.Catch(path: str = 'catch', filepaths=None, catchFile=None, walk: bool = False)
+### *class* Daisy.Catch.Catch(daisyCryptography, path: str = 'catch', filepaths=None, catchFile=None, walk: bool = False)

 Sub class of Cache for handling catchs

@@ -10,7 +10,7 @@ Sub class of Cache for handling catchs

 [🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Daisy/Catch.py)

-#### get(head: str, tail: str, fins=None)
+#### get(head: str, body: str, fins=None)

 Get catch by pieces

@@ -0,0 +1,7 @@
+<a id="credential"></a>
+
+# Credential
+
+### *class* Daisy.Credential.Credential(nodeNickname, credentialName, extension, daisyCryptography)
+
+Currently unused credential class, will be fleshed out for credentialed access to the web ui
@@ -0,0 +1,28 @@
+<a id="cryptographyutil"></a>
+
+# CryptographyUtil
+
+### *class* Daisy.CryptographyUtil.SteelPetal(key: str, nonce=None, testData=None)
+
+Cryptography utility for encrypting files
+
+#### decrypt(data: bytes)
+
+Decrypt encrypted binary data
+
+data: bytes
+: Data to decrypt
+
+#### encrypt(data: bytes)
+
+Encrypt binary data
+
+data: bytes
+: Data to encrypt
+
+#### pad(key: str)
+
+Pad key to make it usable
+
+key: str
+: User’s plain text key
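A minimal sketch of SteelPetal based on the signatures above; how the key is padded and the nonce generated is assumed to happen inside the class.

```python
# Minimal sketch from the documented signatures; key padding and nonce
# handling are assumed to be internal to the class.
from Daisy.CryptographyUtil import SteelPetal

petal = SteelPetal("my plain text key")

ciphertext = petal.encrypt(b"record contents")
plaintext = petal.decrypt(ciphertext)
```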
@@ -4,7 +4,7 @@

 # Daisy

-### *class* Daisy.Daisy.Daisy(filepath: str, templates: dict = {}, template: bool = False, prefillDict: bool = False)
+### *class* Daisy.Daisy.Daisy(filepath: str, daisyCryptography, templates: dict = {}, template: bool = False, prefillDict: bool = False, remote=False)

 Base class for Daisy data representation

@@ -33,6 +33,13 @@ Get record dictionary from memory
 * **Return type:**
   dict

+#### json_to_msg(path: str)
+
+Convert json at the path plus .json to a msgpack binary
+
+* **Parameters:**
+  **path** (*str*) – Path to json minus the extension
+
 #### read(decrypt: bool = False, decryptKey=False)

 Read record from disk to memory

@@ -47,7 +54,7 @@ Lists contents of directory if object is a directory, otherwise return None

 #### write(override=False, encrypt: bool = False, encryptKey=None, recur: bool = False)

-Write record to disk
+Write record to disk, note: use override with updated record to update record

 * **Parameters:**
   * **override** – Either false or a dictionary of values to set on the record
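The note on write() above ("use override with updated record to update record") suggests the following pattern; a sketch assuming the documented signatures, with daisy_crypto and the field names as placeholders.

```python
# Sketch of the write-with-override update pattern described above;
# daisy_crypto and the field names are placeholders.
from Daisy.Daisy import Daisy

daisy_crypto = ...  # Daisy.CryptographyUtil.SteelPetal instance

record = Daisy("daisy/peers/node01", daisy_crypto)
current = record.get()                        # record dict from memory

# Update the on-disk record by passing the changed fields through override
record.write(override={"lastSeen": "2024-07-26"})

# Convert daisy/peers/node01.json into a msgpack binary next to it
record.json_to_msg("daisy/peers/node01")
```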
@@ -0,0 +1,25 @@
+<a id="index"></a>
+
+# Index
+
+### *class* Daisy.Index.Index(nodeNickname: str, daisyCryptography, prefill: list = [], indexedFields: list = [], autoIndex: bool = True)
+
+A searchable index of records, this is currently only half implemented
+but works enough to hold our remote catch index
+
+#### addEntry(entry: dict)
+
+Add a record to the index
+
+entry: dict
+: Record to add to the index
+
+#### search(keydict: dict, strict: bool = True)
+
+Search index for records with matching values
+
+keydict: dict
+: Keys/Values to search for
+
+strict: bool
+: Whether to require all keys/values match
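A brief sketch of the half-implemented Index as documented above; the entry fields and daisy_crypto are placeholders.

```python
# Sketch based on the documented signatures; entry fields are placeholders.
from Daisy.Index import Index

daisy_crypto = ...  # Daisy.CryptographyUtil.SteelPetal instance

index = Index("node00", daisy_crypto, indexedFields=["head", "body"], autoIndex=True)

index.addEntry({"head": "example", "body": "site", "node": "node01"})

# strict=True requires every key/value in the keydict to match
hits = index.search({"head": "example"}, strict=True)
```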
@@ -0,0 +1,13 @@
+<a id="ref"></a>
+
+# Ref
+
+### *class* Daisy.Ref.Ref(metadata: dict, path: str, remoteNodeID: str)
+
+Reference to a remote record
+
+metadata: dict
+: Data to fill record with, should only be metadata
+
+path: str
+: Where to store data locally
@@ -1,16 +0,0 @@
-<a id="soil-daisy-signal-management"></a>
-
-# Soil: Daisy signal management
-
-### *class* Daisy.Soil.Compound(cache, isCatch: bool = False)
-
-File system watcher to propagate disk changes
-
-[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Daisy/Soil.py)
-
-#### on_any_event(event)
-
-Called when a CRUD operation is performed on a record file
-
-* **Parameters:**
-  **event** – Event object provided by watchdog
@@ -2,12 +2,39 @@

 # Store: Daisy key value store

-### *class* Daisy.Store.Store(store: str, path: str, nodeNickname: str)
+### *class* Daisy.Store.Store(store: str, path: str, nodeNickname: str, daisyCryptography)

 Key value store

 [🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Daisy/Store.py)

-#### getRecord(key: str)
+#### epehemeral

-#### update(entry: str, data, recur: bool = True)
+Memory only records
+
+* **Type:**
+  dict
+
+#### createEmpty(key: str)
+
+* **Parameters:**
+  * **key** (*str*) – Key to create empty record at
+
+#### getRecord(key: str, ephemeral=False)
+
+Get record at key
+
+* **Parameters:**
+  * **key** (*str*)
+  * **ephemeral** (*bool*) – Whether key is only in memory, used for session cryptography credentials currently
+
+#### update(entry: str, data, recur: bool = True, write=True)
+
+Update given record
+
+* **Parameters:**
+  * **entry** (*str*) – Key to update record of
+  * **data** – Data to update record with
+  * **recur** (*bool*) – Whether to iterate over data
+  * **write** (*bool*) – Whether record is ephemeral
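A sketch of the new ephemeral (memory-only) record flow in Store, using only the documented signatures; daisy_crypto and the stored values are placeholders.

```python
# Sketch of ephemeral records in Store, from the signatures above;
# daisy_crypto and the stored values are placeholders.
from Daisy.Store import Store

daisy_crypto = ...  # Daisy.CryptographyUtil.SteelPetal instance

store = Store("keys", "daisy", "node00", daisy_crypto)

# Session cryptography credentials are kept in memory only
store.createEmpty("session.node01")
store.update("session.node01", {"sessionKey": b"..."})

session = store.getRecord("session.node01", ephemeral=True)
```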
@@ -2,7 +2,7 @@

 # Header packet: Metadata packet

-### *class* Packets.HeaderPacket.Header(packetsID: int, packetCount: int, sender: int, senderDisplayName: int, recipient: int, recipientNode: int, subpacket: bool = False, wantFullResponse: bool = False, packetsClass: int = 0, pAction: int = -1)
+### *class* Packets.HeaderPacket.Header(packetsID: int, packetCount: int, sender: int, senderDisplayName: int, sourceNode: int, recipient: int, recipientNode: int, wantFullResponse: bool = False, packetsClass: int = 0, pAction: int = -1, target=True)

 Metadata packet for messages

@@ -52,7 +52,7 @@ Whether a response should be sent when the message completes reception (TODO)

 #### pAction

-3 digit (maximum) pAction ID for mapping precise actions within a protocol (TODO)
+3 digit (maximum) pAction ID for mapping precise actions within a protocol

 * **Type:**
   int

@@ -61,6 +61,6 @@ Whether a response should be sent when the message completes reception (TODO)

 Dump packet to msgpack encoded binary for transmission

-#### usePreset(path: str)
+#### usePreset(path: str, daisyCryptography)

-Add preset fields to the packet
+Add preset fields to the packet, currently unused
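A construction sketch for the revised Header signature (note the new sourceNode and target fields); all IDs below are illustrative.

```python
# Construction sketch for the revised Header; all IDs are illustrative.
from Packets.HeaderPacket import Header

header = Header(
    packetsID=1234,
    packetCount=3,
    sender=1,
    senderDisplayName=1,
    sourceNode=100,      # new in this revision
    recipient=2,
    recipientNode=200,
    packetsClass=0,
    pAction=1,           # up to three digits, selects the action within a protocol
)

wire_bytes = header.dump()   # msgpack encoded binary for transmission
```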
@@ -0,0 +1,7 @@
+<a id="bubble-bubble"></a>
+
+# bubble.Bubble
+
+### *class* Packets.Messages.Protocols.bubble.Bubble.Bubble(sender, senderID, sourceNode, recipient, recipientNode, cryptographyInfo, data)
+
+Send data from peer to peer
@@ -0,0 +1,7 @@
+<a id="catch-indexsync"></a>
+
+# catch.IndexSync
+
+### *class* Packets.Messages.Protocols.catch.IndexSync.IndexSync(sender, senderID, sourceNode, recipient, recipientNode, cryptographyInfo, index, target=False)
+
+Sync indices of Catchs across nodes
@@ -0,0 +1,7 @@
+<a id="catch-request"></a>
+
+# catch.Request
+
+### *class* Packets.Messages.Protocols.catch.Request.CatchRequest(sender, senderID, sourceNode, recipient, recipientNode, cryptographyInfo, head, body, fins, pskEncrypt=False)
+
+Request Catch (website) from another node
@@ -0,0 +1,7 @@
+<a id="catch-response"></a>
+
+# catch.Response
+
+### *class* Packets.Messages.Protocols.catch.Response.CatchResponse(sender, senderID, sourceNode, recipient, recipientNode, cryptographyInfo, html, pskEncrypt=False)
+
+Send local Catch (website) to user who requested it
@@ -0,0 +1,7 @@
+<a id="cryptography-handshake"></a>
+
+# cryptography.Handshake
+
+### *class* Packets.Messages.Protocols.cryptography.Handshake.Handshake(sender, senderID, recipient, recipientNode, cryptographyInfo, onodeID, sourceNode)
+
+Provides the ephemeral key for session encryption
@@ -0,0 +1,7 @@
+<a id="hopper-request"></a>
+
+# hopper.Request
+
+### *class* Packets.Messages.Protocols.hopper.Request.HopperRequest(sender, senderID, sourceNode, recipient, recipientNode, url: str, params: dict, method: str, cryptographyInfo)
+
+Proxy request to main internet from remote node
@@ -0,0 +1,7 @@
+<a id="hopper-response"></a>
+
+# hopper.Response
+
+### *class* Packets.Messages.Protocols.hopper.Response.HopperResponse(sender, senderID, sourceNode, recipient, recipientNode, response, cryptographyInfo)
+
+Send proxied request back to requester
@@ -0,0 +1,7 @@
+<a id="map-announce"></a>
+
+# map.Announce
+
+### *class* Packets.Messages.Protocols.map.Announce.AnnounceMessage(sender, senderID, sourceNode, cryptographyInfo, mapping)
+
+Announce the network map details and public key of the node for discovery
@@ -2,7 +2,7 @@

 # Packet: Base packet

-### *class* Packets.Packet.Packet(data: bytes, packetsID: int = -1, packetNumber=False, packetCount: int = 1, packetsClass: int = -1)
+### *class* Packets.Packet.Packet(data: bytes, packetsID: int = -1, packetNumber=False, packetCount: int = 1, packetsClass: int = -1, primaryMessage=None)

 Base class for Packets

@@ -2,6 +2,6 @@

 # SubMessage: Additional data for larger messages

-### *class* Packets.SubMessage.SubMessage
+### *class* Packets.SubMessage.SubMessage(sender, senderID, sourceNode, recipient, recipientNode, cryptographyInfo, protocolID, pAction, data, target=True, primaryMessage=None)

-TODO
+SubMessage to a primary message, enables us to send more/dynamic data
@@ -2,6 +2,6 @@

 # SubPacket: Packets for submessages

-### *class* Packets.SubPacket.SubPacket
+### *class* Packets.SubMessage.SubMessage(sender, senderID, sourceNode, recipient, recipientNode, cryptographyInfo, protocolID, pAction, data, target=True, primaryMessage=None)

-TODO
+SubMessage to a primary message, enables us to send more/dynamic data
@@ -0,0 +1,59 @@
+<a id="services-action"></a>
+
+# Services.Action
+
+### *class* Services.Action.Action(action, data, sender=None, senderID=None, sourceNode=None, recipient=None, recipientNode=None)
+
+Generic action class for triggering actions from sub processes on the main thread
+
+[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Services/Action.py)
+
+#### action
+
+Action to run
+
+* **Type:**
+  str
+
+#### data
+
+Data to pass to action
+
+* **Type:**
+  dict
+
+#### sender
+
+Sender identifier
+
+* **Type:**
+  str
+
+#### senderID
+
+Sender second level identifier
+
+* **Type:**
+  str
+
+#### sourceNode
+
+Sending node
+
+* **Type:**
+  str
+
+#### recipient
+
+Peer identifier to route to
+
+* **Type:**
+  str
+
+#### recipientNode
+
+Intended destination node identifier
+
+#### getAction()
+
+#### getData()
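A usage sketch for Action based on the constructor and getters above; the action name and data are placeholders for whatever the main loop's todo queue expects.

```python
# Usage sketch based on the documented constructor and getters; the action
# name and data are placeholders.
from Services.Action import Action

act = Action(
    "sendToPeer",                 # action to run on the main thread
    {"res": "<p>hello</p>"},      # data handed to that action
    sender=1,
    senderID=10,
    recipient=2,
    recipientNode=200,
)

name = act.getAction()
payload = act.getData()
```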
@@ -36,7 +36,7 @@ Map of PierMesh node IDs to MeshTastic node IDs
 * **Type:**
   dict

-#### addLookup(onodeID: str, mnodeID: str)
+#### *async* addLookup(onodeID: str, mnodeID: str)

 Adds node to lookup

@@ -92,3 +92,11 @@ Import map from path
 #### render(pathPrefix: str = '')

 Render outer and inner network map to disk at the given path prefix
+
+#### syncaddLookup(onodeID: str, mnodeID: str)
+
+Adds node to lookup
+
+* **Parameters:**
+  * **onodeID** (*str*) – Internal nodeID
+  * **mnodeID** (*str*) – MeshTastic nodeID
@@ -2,22 +2,18 @@

 # serve: Web UI server

-### *class* Splash.serve.Server(transceiver, catch, onodeID, network, cLog)
+### *class* Splash.serve.Server(transceiver, catch, onodeID, network, cryptographyInfo, remoteCatchIndex, cache)

 Web server that serves the web ui and provides web to node communication

 [🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/src/Sponge/serve.py)

-#### cLog
+#### transceiver

-Reference to run.Node.cLog for logging
+Reference to our Transceiver.Transceiver.Transceiver instance

-#### transmitter
-
-Reference to our Transmission.transmission.Transmitter instance
-
 * **Type:**
-  Transmission.transmission.Transmitter
+  [Transceiver.Transceiver.Transceiver](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver)

 #### network

@@ -51,6 +47,10 @@ Reference to our Catch Cache instance to pull from for serving Catchs
 * **Type:**
   [Daisy.Catch.Catch](/PierMesh/piermesh/src/branch/main/docs/Daisy/Catch.md#Daisy.Catch.Catch)

-#### *async* sendToPeer(peerID: str, data: str)
+#### *async* getPSKs()
+
+Get all PSKs for display
+
+#### *async* sendToPeer(peerID: str, data: str, target: str)

 Send data to Websocket of peer with peerID
@@ -0,0 +1,86 @@
+<a id="sponge-protocols-yellow"></a>
+
+# Sponge.Protocols.Yellow
+
+### *class* Sponge.Protocols.Yellow.YCTX(packetsID, packetCount, pAction, todo, cryptographyInfo, sourceNode, subMessage=False, subMessages={}, submessagesIDs=[], eData=None)
+
+Context data structure for message parsing
+
+### *class* Sponge.Protocols.Yellow.Yellow(yctx: [YCTX](#Sponge.Protocols.Yellow.YCTX))
+
+Message parser that’s subclassed to easily make parsers for specific protocols
+
+#### yctx
+
+Message parsing context
+
+* **Type:**
+  [YCTX](#Sponge.Protocols.Yellow.YCTX)
+
+#### message
+
+#### submessages
+
+Dictionary of submessages
+
+* **Type:**
+  dict
+
+#### submessagesIDs
+
+List of required submessages
+
+* **Type:**
+  list
+
+#### finishedSubmessages
+
+Dictionary of finished submessages
+
+* **Type:**
+  dict
+
+#### dataOrder
+
+List that maps packets based on their received order
+
+* **Type:**
+  list
+
+#### data
+
+Data of primary message
+
+* **Type:**
+  list
+
+#### nonce
+
+Cryptography artifact for decrypting message
+
+#### tag
+
+Cryptography artifact for decrypting message
+
+#### gotHead
+
+Whether we’ve gotten the head/header packet
+
+* **Type:**
+  bool
+
+#### todo
+
+Queue of actions to execute in the main loop
+
+#### checkComplete()
+
+#### *async* doAct(setpAction=False, repeatDataOnActions=[], subMessage=False)
+
+#### *async* dump()
+
+#### *async* id()
+
+#### pActions *= []*
+
+#### *async* processPacket(p, subMessage=False, rdoaoc=[])
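The protocol pages that follow subclass Yellow and mostly just declare their pActions. A minimal hypothetical subclass sketch, assuming only the class layout documented above:

```python
# Hypothetical subclass sketch; the real parsers documented below
# (bubble, catch, cryptography, hopper, map) follow this shape.
from Sponge.Protocols.Yellow import YCTX, Yellow


class Echo(Yellow):
    # pAction names map onto action_* handlers run by the main Node loop
    pActions = ["sendToPeer"]

    def __init__(self, yctx: YCTX):
        super().__init__(yctx)
```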
@@ -0,0 +1,11 @@
+<a id="sponge-protocols-bubble"></a>
+
+# Sponge.Protocols.bubble
+
+### *class* Sponge.Protocols.bubble.Bubble(yctx: [YCTX](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.YCTX))
+
+Peer to peer protocol
+
+[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/src/Sponge/Protocols/bubble.py)
+
+#### pActions *= ['sendToPeer']*
@@ -0,0 +1,11 @@
+<a id="sponge-protocols-catch"></a>
+
+# Sponge.Protocols.catch
+
+### *class* Sponge.Protocols.catch.Catch(yctx: [YCTX](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.YCTX))
+
+Catch exchange protocol
+
+[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/catch.py)
+
+#### pActions *= ['sendCatch', 'routeCatch', 'syncIndex']*
@@ -0,0 +1,11 @@
+<a id="sponge-protocols-cryptography"></a>
+
+# Sponge.Protocols.cryptography
+
+### *class* Sponge.Protocols.cryptography.CryptographyFilter(yctx: [YCTX](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.YCTX))
+
+Cryptographic operations protocol
+
+[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/cryptography.py)
+
+#### pActions *= ['initCryptography']*
@@ -0,0 +1,11 @@
+<a id="sponge-protocols-hopper"></a>
+
+# Sponge.Protocols.hopper
+
+### *class* Sponge.Protocols.hopper.Hopper(yctx: [YCTX](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.YCTX))
+
+Internet inter(h)op protocol
+
+[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/hopper.py)
+
+#### pActions *= ['hop', 'routeHop']*
@@ -0,0 +1,13 @@
+<a id="sponge-protocols-map"></a>
+
+# Sponge.Protocols.map
+
+### *class* Sponge.Protocols.map.Map(yctx: [YCTX](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.YCTX))
+
+Network mapping protocol
+
+[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/map.py)
+
+#### pActions *= ['map', 'initCryptography']*
+
+#### process(message, isSubMessage=False)
@@ -2,43 +2,63 @@

 # base: Primary filtering functionality

-### *class* Sponge.base.Filter(cache, onodeID, todo, cLog)
+### *class* Sponge.base.Filter(cache, onodeID, todo, cryptographyInfo)

 Packet filtering orchestration

 [🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/base.py)

-cLog
-: Reference to run.Node.cLog for logging
-
-cache: Daisy.Cache.Cache
-: Reference to our Daisy Cache instance
-
-completed: list
-: List of completed messages IDs
-
-todo
-: Reference to list of actions to do in the Node
-
-onodeID
-: PierMesh node ID
-
 #### cache

-Messages is temporary storage for unfinished messages
+Reference to our Daisy Cache instance
+
+* **Type:**
+  [Daisy.Cache.Cache](/PierMesh/piermesh/src/branch/main/docs/Daisy/Cache.md#Daisy.Cache.Cache)
+
+#### onodeID
+
+PierMesh node ID
+
+#### todo
+
+Reference to list of actions to do in the Node
+
+#### cryptographyInfo
+
+Cryptography instance for encrypting message
+
+* **Type:**
+  [Cryptography.WhaleSong.Transport](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport)
+
+#### messages
+
+Temporary storage for unfinished messages
+
+* **Type:**
+  dict
+
+#### submessages
+
+Temporary storage for unfinished submessages
+
+* **Type:**
+  dict
+
+#### completed
+
+List of finished message ids so we don’t reprocess messages
+
+* **Type:**
+  list

 #### mCheck(payload: bytes)

 Check if payload bytes are msgpack encoded, otherwise skip

-#### *async* protoMap(protocolID: int)
+#### *async* protoMap(protocolID: int, packetsID, packetCount, sourceNode, submessagesIDs=[], pAction=None)

 Get protocol from protocol ID using the mlookup table

-#### *async* protoRoute(completeMessage: dict)
-
-Route message to proper protocol handler
-
 #### selfCheck(packet)

 Check if this is a self packet, if so skip

@@ -46,31 +66,3 @@ Check if this is a self packet, if so skip

 #### *async* sieve(packet)

 Base filtering logic, takes a single MeshTastic packet
-
-<a id="protocols"></a>
-
-# Protocols
-
-#### *async* bubble.filter(recipient, recipientNode, onodeID, todo)
-
-Peer to peer protol
-
-[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/src/Sponge/Protocols/bubble.py)
-
-#### *async* catch.filter(recipient, recipientNode, todo)
-
-Catch exchange protocol
-
-[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/catch.py)
-
-#### *async* cryptography.filter(recipientNode, todo)
-
-Cryptographic operations protocol
-
-[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/cryptography.py)
-
-#### *async* map.filter(todo)
-
-Network mapping protocol
-
-[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/map.py)
@@ -2,7 +2,7 @@

 # Transceiver: Layer 0 data transceiving

-### *class* Transceiver.Transceiver.Transceiver(device, filter, onodeID, cache, catch, cryptographyInfo, cLog)
+### *class* Transceiver.Transceiver.Transceiver(device, filter, onodeID, cache, catch, cryptographyInfo, network)

 Handling LoRa transceiving

@@ -17,7 +17,7 @@ Reference to run.Node.cLog for logging
 Cryptography instance for encrypting transmissions

 * **Type:**
-  [Cryptography.WhaleSong.DHEFern](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern)
+  Cryptography.WhaleSong.DHEFern

 #### filter

@@ -114,6 +114,7 @@ Checks if acknowldgement was received per packet and if not resends
 #### responseCheck(packet)

 On acknowldgement response set acks based on response
+TODO: Stop this being sent to sieve

 #### send(packet, recipientNode=False)

@@ -122,6 +123,8 @@ Send individual packet
 * **Parameters:**
   **recipientNode** – If set send to specified node

-#### *async* sendAnnounce()
+#### *async* sendAnnounce(dontRespond=False)

 Send an announce packet (contains basic network mapping information) every so often so new nodes autoconnect
+
+#### *async* sendMessage(message: Message)
196 docs/readme.md
@@ -11,7 +11,6 @@ sphinx-quickstart on Fri Jul 26 23:30:55 2024. -->

 * [run: PierMesh service runner](/PierMesh/piermesh/src/branch/main/docs/run.md)
 * [`Node`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node)
-* [`Node.toLog`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.toLog)
 * [`Node.actions`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.actions)
 * [`Node.todo`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.todo)
 * [`Node.network`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.network)

@@ -20,29 +19,28 @@ sphinx-quickstart on Fri Jul 26 23:30:55 2024. -->
 * [`Node.nodeInfo`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.nodeInfo)
 * [`Node.onodeID`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.onodeID)
 * [`Node.oTransceiver`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.oTransceiver)
-* [`Node.processed`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.processed)
 * [`Node.proc`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.proc)
 * [`Node.mTasks`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.mTasks)
-* [`Node.action_initNodeDH()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.action_initNodeDH)
-* [`Node.action_keyDeriveDH()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.action_keyDeriveDH)
+* [`Node.action_addPSK()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.action_addPSK)
+* [`Node.action_hop()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.action_hop)
+* [`Node.action_initCryptography()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.action_initCryptography)
 * [`Node.action_map()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.action_map)
+* [`Node.action_routeCatch()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.action_routeCatch)
+* [`Node.action_routeHop()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.action_routeHop)
 * [`Node.action_sendCatch()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.action_sendCatch)
 * [`Node.action_sendToPeer()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.action_sendToPeer)
-* [`Node.cLog()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.cLog)
+* [`Node.action_syncIndex()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.action_syncIndex)
+* [`Node.fsInit()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.fsInit)
+* [`Node.main()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.main)
+* [`Node.monitor()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.monitor)
 * [`Node.spongeListen()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.spongeListen)
-* [ui: TUI application](/PierMesh/piermesh/src/branch/main/docs/ui.md)
-* [`TUI`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI)
-* [`TUI.visibleLogo`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI.visibleLogo)
-* [`TUI.nodeOb`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI.nodeOb)
-* [`TUI.done`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI.done)
-* [`TUI.CSS_PATH`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI.CSS_PATH)
-* [`TUI.action_quitFull()`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI.action_quitFull)
-* [`TUI.action_toggleFullscreen()`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI.action_toggleFullscreen)
-* [`TUI.compose()`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI.compose)
-* [`TUI.do_set_cpu_percent()`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI.do_set_cpu_percent)
-* [`TUI.do_set_mem()`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI.do_set_mem)
-* [`TUI.do_write_line()`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI.do_write_line)
-* [`TUI.on_mount()`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI.on_mount)
+* [tlog](/PierMesh/piermesh/src/branch/main/docs/tlog.md)
+* [`VHandler`](/PierMesh/piermesh/src/branch/main/docs/tlog.md#tlog.VHandler)
+* [`VHandler.tolog`](/PierMesh/piermesh/src/branch/main/docs/tlog.md#tlog.VHandler.tolog)
+* [`VHandler.emit()`](/PierMesh/piermesh/src/branch/main/docs/tlog.md#tlog.VHandler.emit)
+* [`VHandler.tolog`](/PierMesh/piermesh/src/branch/main/docs/tlog.md#id0)
+* [`logUI()`](/PierMesh/piermesh/src/branch/main/docs/tlog.md#tlog.logUI)
+* [`runLogUI()`](/PierMesh/piermesh/src/branch/main/docs/tlog.md#tlog.runLogUI)
 * [Network: Network map representation](/PierMesh/piermesh/src/branch/main/docs/Siph/map.md)
 * [`Network`](/PierMesh/piermesh/src/branch/main/docs/Siph/map.md#Siph.map.Network)
 * [`Network.omap`](/PierMesh/piermesh/src/branch/main/docs/Siph/map.md#Siph.map.Network.omap)

@@ -60,7 +58,9 @@ sphinx-quickstart on Fri Jul 26 23:30:55 2024. -->
 * [`Network.getRoute()`](/PierMesh/piermesh/src/branch/main/docs/Siph/map.md#Siph.map.Network.getRoute)
 * [`Network.mimport()`](/PierMesh/piermesh/src/branch/main/docs/Siph/map.md#Siph.map.Network.mimport)
 * [`Network.render()`](/PierMesh/piermesh/src/branch/main/docs/Siph/map.md#Siph.map.Network.render)
+* [`Network.syncaddLookup()`](/PierMesh/piermesh/src/branch/main/docs/Siph/map.md#Siph.map.Network.syncaddLookup)
 * [hopper: Small internet interop utilities](/PierMesh/piermesh/src/branch/main/docs/Components/hopper.md)
+* [`downloadFile()`](/PierMesh/piermesh/src/branch/main/docs/Components/hopper.md#Components.hopper.downloadFile)
 * [`get()`](/PierMesh/piermesh/src/branch/main/docs/Components/hopper.md#Components.hopper.get)
 * [`post()`](/PierMesh/piermesh/src/branch/main/docs/Components/hopper.md#Components.hopper.post)
 * [Daisy based cache](/PierMesh/piermesh/src/branch/main/docs/Daisy/Cache.md)

@@ -73,55 +73,102 @@ sphinx-quickstart on Fri Jul 26 23:30:55 2024. -->
 * [`Catch`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Catch.md#Daisy.Catch.Catch)
 * [`Catch.get()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Catch.md#Daisy.Catch.Catch.get)
 * [`Catch.sget()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Catch.md#Daisy.Catch.Catch.sget)
+* [Credential](/PierMesh/piermesh/src/branch/main/docs/Daisy/Credential.md)
+* [`Credential`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Credential.md#Daisy.Credential.Credential)
+* [CryptographyUtil](/PierMesh/piermesh/src/branch/main/docs/Daisy/CryptographyUtil.md)
+* [`SteelPetal`](/PierMesh/piermesh/src/branch/main/docs/Daisy/CryptographyUtil.md#Daisy.CryptographyUtil.SteelPetal)
+* [`SteelPetal.decrypt()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/CryptographyUtil.md#Daisy.CryptographyUtil.SteelPetal.decrypt)
+* [`SteelPetal.encrypt()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/CryptographyUtil.md#Daisy.CryptographyUtil.SteelPetal.encrypt)
+* [`SteelPetal.pad()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/CryptographyUtil.md#Daisy.CryptographyUtil.SteelPetal.pad)
 * [Daisy](/PierMesh/piermesh/src/branch/main/docs/Daisy/Daisy.md)
 * [`Daisy`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Daisy.md#Daisy.Daisy.Daisy)
 * [`Daisy.filepath`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Daisy.md#Daisy.Daisy.Daisy.filepath)
 * [`Daisy.msg`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Daisy.md#Daisy.Daisy.Daisy.msg)
 * [`Daisy.get()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Daisy.md#Daisy.Daisy.Daisy.get)
+* [`Daisy.json_to_msg()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Daisy.md#Daisy.Daisy.Daisy.json_to_msg)
 * [`Daisy.read()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Daisy.md#Daisy.Daisy.Daisy.read)
 * [`Daisy.sublist()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Daisy.md#Daisy.Daisy.Daisy.sublist)
 * [`Daisy.write()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Daisy.md#Daisy.Daisy.Daisy.write)
-* [Soil: Daisy signal management](/PierMesh/piermesh/src/branch/main/docs/Daisy/Soil.md)
-* [`Compound`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Soil.md#Daisy.Soil.Compound)
-* [`Compound.on_any_event()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Soil.md#Daisy.Soil.Compound.on_any_event)
+* [Index](/PierMesh/piermesh/src/branch/main/docs/Daisy/Index.md)
+* [`Index`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Index.md#Daisy.Index.Index)
+* [`Index.addEntry()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Index.md#Daisy.Index.Index.addEntry)
+* [`Index.search()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Index.md#Daisy.Index.Index.search)
+* [Ref](/PierMesh/piermesh/src/branch/main/docs/Daisy/Ref.md)
+* [`Ref`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Ref.md#Daisy.Ref.Ref)
 * [Store: Daisy key value store](/PierMesh/piermesh/src/branch/main/docs/Daisy/Store.md)
 * [`Store`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Store.md#Daisy.Store.Store)
+* [`Store.epehemeral`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Store.md#Daisy.Store.Store.epehemeral)
+* [`Store.createEmpty()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Store.md#Daisy.Store.Store.createEmpty)
 * [`Store.getRecord()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Store.md#Daisy.Store.Store.getRecord)
 * [`Store.update()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Store.md#Daisy.Store.Store.update)
-* [WhaleSong: Diffie hellman ephemeral Fernet based encryption](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md)
-* [`DHEFern`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern)
-* [`DHEFern.cLog`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.cLog)
-* [`DHEFern.loadedParams`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.loadedParams)
-* [`DHEFern.loadedKeys`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.loadedKeys)
-* [`DHEFern.nodeNickname`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.nodeNickname)
-* [`DHEFern.cache`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.cache)
-* [`DHEFern.publicKey`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.publicKey)
-* [`DHEFern.privateKey`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.privateKey)
-* [`DHEFern.checkInMem()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.checkInMem)
-* [`DHEFern.decrypt()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.decrypt)
-* [`DHEFern.encrypt()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.encrypt)
-* [`DHEFern.genKeyPair()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.genKeyPair)
-* [`DHEFern.genParams()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.genParams)
-* [`DHEFern.getParamsBytes()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.getParamsBytes)
-* [`DHEFern.getRecord()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.getRecord)
-* [`DHEFern.getSalt()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.getSalt)
-* [`DHEFern.initStore()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.initStore)
-* [`DHEFern.keyDerive()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.keyDerive)
-* [`DHEFern.loadParamBytes()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.loadParamBytes)
-* [`DHEFern.loadRecordToMem()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.loadRecordToMem)
+* [WhaleSong](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md)
+* [`Transport`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport)
+* [`Transport.loadedKeys`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.loadedKeys)
+* [`Transport.nodeNickname`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.nodeNickname)
+* [`Transport.cache`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.cache)
+* [`Transport.publicKey`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.publicKey)
+* [`Transport.privateKey`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.privateKey)
+* [`Transport.daisyCryptography`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.daisyCryptography)
+* [`Transport.addPeerEphemeralKey()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.addPeerEphemeralKey)
+* [`Transport.addPublickey()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.addPublickey)
+* [`Transport.decrypt()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.decrypt)
+* [`Transport.encrypt()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.encrypt)
+* [`Transport.genOurEphemeralKey()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.genOurEphemeralKey)
+* [`Transport.genStaticKey()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.genStaticKey)
+* [`Transport.generateSessionKey()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.generateSessionKey)
+* [`Transport.getRecord()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.getRecord)
+* [`Transport.initStore()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.initStore)
+* [`Transport.kdf()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.kdf)
+* [`Transport.sessionSetup()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport.sessionSetup)
 * [base: Primary filtering functionality](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md)
 * [`Filter`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter)
 * [`Filter.cache`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.cache)
+* [`Filter.onodeID`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.onodeID)
+* [`Filter.todo`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.todo)
+* [`Filter.cryptographyInfo`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.cryptographyInfo)
+* [`Filter.messages`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.messages)
+* [`Filter.submessages`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.submessages)
+* [`Filter.completed`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.completed)
 * [`Filter.mCheck()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.mCheck)
 * [`Filter.protoMap()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.protoMap)
-* [`Filter.protoRoute()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.protoRoute)
 * [`Filter.selfCheck()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.selfCheck)
 * [`Filter.sieve()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.sieve)
-* [Protocols](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#protocols)
-* [`bubble.filter()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.Protocols.bubble.filter)
-* [`catch.filter()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.Protocols.catch.filter)
+* [Sponge.Protocols.Yellow](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md)
+* [`YCTX`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.YCTX)
+* [`Yellow`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow)
|
||||||
* [`cryptography.filter()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.Protocols.cryptography.filter)
|
* [`Yellow.yctx`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.yctx)
|
||||||
* [`map.filter()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.Protocols.map.filter)
|
* [`Yellow.message`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.message)
|
||||||
|
* [`Yellow.submessages`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.submessages)
|
||||||
|
* [`Yellow.submessagesIDs`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.submessagesIDs)
|
||||||
|
* [`Yellow.finishedSubmessages`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.finishedSubmessages)
|
||||||
|
* [`Yellow.dataOrder`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.dataOrder)
|
||||||
|
* [`Yellow.data`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.data)
|
||||||
|
* [`Yellow.nonce`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.nonce)
|
||||||
|
* [`Yellow.tag`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.tag)
|
||||||
|
* [`Yellow.gotHead`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.gotHead)
|
||||||
|
* [`Yellow.todo`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.todo)
|
||||||
|
* [`Yellow.checkComplete()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.checkComplete)
|
||||||
|
* [`Yellow.doAct()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.doAct)
|
||||||
|
* [`Yellow.dump()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.dump)
|
||||||
|
* [`Yellow.id()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.id)
|
||||||
|
* [`Yellow.pActions`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.pActions)
|
||||||
|
* [`Yellow.processPacket()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/Yellow.md#Sponge.Protocols.Yellow.Yellow.processPacket)
|
||||||
|
* [Sponge.Protocols.bubble](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/bubble.md)
|
||||||
|
* [`Bubble`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/bubble.md#Sponge.Protocols.bubble.Bubble)
|
||||||
|
* [`Bubble.pActions`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/bubble.md#Sponge.Protocols.bubble.Bubble.pActions)
|
||||||
|
* [Sponge.Protocols.catch](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/catch.md)
|
||||||
|
* [`Catch`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/catch.md#Sponge.Protocols.catch.Catch)
|
||||||
|
* [`Catch.pActions`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/catch.md#Sponge.Protocols.catch.Catch.pActions)
|
||||||
|
* [Sponge.Protocols.cryptography](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/cryptography.md)
|
||||||
|
* [`CryptographyFilter`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/cryptography.md#Sponge.Protocols.cryptography.CryptographyFilter)
|
||||||
|
* [`CryptographyFilter.pActions`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/cryptography.md#Sponge.Protocols.cryptography.CryptographyFilter.pActions)
|
||||||
|
* [Sponge.Protocols.hopper](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/hopper.md)
|
||||||
|
* [`Hopper`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/hopper.md#Sponge.Protocols.hopper.Hopper)
|
||||||
|
* [`Hopper.pActions`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/hopper.md#Sponge.Protocols.hopper.Hopper.pActions)
|
||||||
|
* [Sponge.Protocols.map](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/map.md)
|
||||||
|
* [`Map`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/map.md#Sponge.Protocols.map.Map)
|
||||||
|
* [`Map.pActions`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/map.md#Sponge.Protocols.map.Map.pActions)
|
||||||
|
* [`Map.process()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/Protocols/map.md#Sponge.Protocols.map.Map.process)
|
||||||
* [Header packet: Metadata packet](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md)
|
* [Header packet: Metadata packet](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md)
|
||||||
* [`Header`](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md#Packets.HeaderPacket.Header)
|
* [`Header`](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md#Packets.HeaderPacket.Header)
|
||||||
* [`Header.sender`](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md#Packets.HeaderPacket.Header.sender)
|
* [`Header.sender`](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md#Packets.HeaderPacket.Header.sender)
|
||||||
|
@ -147,7 +194,23 @@ sphinx-quickstart on Fri Jul 26 23:30:55 2024. -->
|
||||||
* [SubMessage: Additional data for larger messages](/PierMesh/piermesh/src/branch/main/docs/Packets/SubMessage.md)
|
* [SubMessage: Additional data for larger messages](/PierMesh/piermesh/src/branch/main/docs/Packets/SubMessage.md)
|
||||||
* [`SubMessage`](/PierMesh/piermesh/src/branch/main/docs/Packets/SubMessage.md#Packets.SubMessage.SubMessage)
|
* [`SubMessage`](/PierMesh/piermesh/src/branch/main/docs/Packets/SubMessage.md#Packets.SubMessage.SubMessage)
|
||||||
* [SubPacket: Packets for submessages](/PierMesh/piermesh/src/branch/main/docs/Packets/SubPacket.md)
|
* [SubPacket: Packets for submessages](/PierMesh/piermesh/src/branch/main/docs/Packets/SubPacket.md)
|
||||||
* [`SubPacket`](/PierMesh/piermesh/src/branch/main/docs/Packets/SubPacket.md#Packets.SubPacket.SubPacket)
|
* [`SubMessage`](/PierMesh/piermesh/src/branch/main/docs/Packets/SubPacket.md#Packets.SubMessage.SubMessage)
|
||||||
|
* [bubble.Bubble](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/bubble/Bubble.md)
|
||||||
|
* [`Bubble`](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/bubble/Bubble.md#Packets.Messages.Protocols.bubble.Bubble.Bubble)
|
||||||
|
* [catch.IndexSync](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/catch/IndexSync.md)
|
||||||
|
* [`IndexSync`](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/catch/IndexSync.md#Packets.Messages.Protocols.catch.IndexSync.IndexSync)
|
||||||
|
* [catch.Request](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/catch/Request.md)
|
||||||
|
* [`CatchRequest`](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/catch/Request.md#Packets.Messages.Protocols.catch.Request.CatchRequest)
|
||||||
|
* [catch.Response](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/catch/Response.md)
|
||||||
|
* [`CatchResponse`](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/catch/Response.md#Packets.Messages.Protocols.catch.Response.CatchResponse)
|
||||||
|
* [cryptography.Handshake](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/cryptography/Handshake.md)
|
||||||
|
* [`Handshake`](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/cryptography/Handshake.md#Packets.Messages.Protocols.cryptography.Handshake.Handshake)
|
||||||
|
* [hopper.Request](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/hopper/Request.md)
|
||||||
|
* [`HopperRequest`](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/hopper/Request.md#Packets.Messages.Protocols.hopper.Request.HopperRequest)
|
||||||
|
* [hopper.Response](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/hopper/Response.md)
|
||||||
|
* [`HopperResponse`](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/hopper/Response.md#Packets.Messages.Protocols.hopper.Response.HopperResponse)
|
||||||
|
* [map.Announce](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/map/Announce.md)
|
||||||
|
* [`AnnounceMessage`](/PierMesh/piermesh/src/branch/main/docs/Packets/Messages/Protocols/map/Announce.md#Packets.Messages.Protocols.map.Announce.AnnounceMessage)
|
||||||
* [Transceiver: Layer 0 data transceiving](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md)
|
* [Transceiver: Layer 0 data transceiving](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md)
|
||||||
* [`Transceiver`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver)
|
* [`Transceiver`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver)
|
||||||
* [`Transceiver.cLog`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.cLog)
|
* [`Transceiver.cLog`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.cLog)
|
||||||
|
@ -170,17 +233,32 @@ sphinx-quickstart on Fri Jul 26 23:30:55 2024. -->
|
||||||
* [`Transceiver.responseCheck()`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.responseCheck)
|
* [`Transceiver.responseCheck()`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.responseCheck)
|
||||||
* [`Transceiver.send()`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.send)
|
* [`Transceiver.send()`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.send)
|
||||||
* [`Transceiver.sendAnnounce()`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.sendAnnounce)
|
* [`Transceiver.sendAnnounce()`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.sendAnnounce)
|
||||||
|
* [`Transceiver.sendMessage()`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.sendMessage)
|
||||||
* [serve: Web UI server](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md)
|
* [serve: Web UI server](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md)
|
||||||
* [`Server`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server)
|
* [`Server`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server)
|
||||||
* [`Server.cLog`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.cLog)
|
* [`Server.transceiver`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.transceiver)
|
||||||
* [`Server.transmitter`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.transmitter)
|
|
||||||
* [`Server.network`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.network)
|
* [`Server.network`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.network)
|
||||||
* [`Server.nodeID`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.nodeID)
|
* [`Server.nodeID`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.nodeID)
|
||||||
* [`Server.peerIDs`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.peerIDs)
|
* [`Server.peerIDs`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.peerIDs)
|
||||||
* [`Server.app`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.app)
|
* [`Server.app`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.app)
|
||||||
* [`Server.catch`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.catch)
|
* [`Server.catch`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.catch)
|
||||||
|
* [`Server.getPSKs()`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.getPSKs)
|
||||||
* [`Server.sendToPeer()`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.sendToPeer)
|
* [`Server.sendToPeer()`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.sendToPeer)
|
||||||
|
* [Context](/PierMesh/piermesh/src/branch/main/docs/Config/Context.md)
|
||||||
|
* [`Context`](/PierMesh/piermesh/src/branch/main/docs/Config/Context.md#Config.Context.Context)
|
||||||
|
* [`Context.ctx`](/PierMesh/piermesh/src/branch/main/docs/Config/Context.md#Config.Context.Context.ctx)
|
||||||
|
* [Services.Action](/PierMesh/piermesh/src/branch/main/docs/Services/Action.md)
|
||||||
|
* [`Action`](/PierMesh/piermesh/src/branch/main/docs/Services/Action.md#Services.Action.Action)
|
||||||
|
* [`Action.action`](/PierMesh/piermesh/src/branch/main/docs/Services/Action.md#Services.Action.Action.action)
|
||||||
|
* [`Action.data`](/PierMesh/piermesh/src/branch/main/docs/Services/Action.md#Services.Action.Action.data)
|
||||||
|
* [`Action.sender`](/PierMesh/piermesh/src/branch/main/docs/Services/Action.md#Services.Action.Action.sender)
|
||||||
|
* [`Action.senderID`](/PierMesh/piermesh/src/branch/main/docs/Services/Action.md#Services.Action.Action.senderID)
|
||||||
|
* [`Action.sourceNode`](/PierMesh/piermesh/src/branch/main/docs/Services/Action.md#Services.Action.Action.sourceNode)
|
||||||
|
* [`Action.recipient`](/PierMesh/piermesh/src/branch/main/docs/Services/Action.md#Services.Action.Action.recipient)
|
||||||
|
* [`Action.recipientNode`](/PierMesh/piermesh/src/branch/main/docs/Services/Action.md#Services.Action.Action.recipientNode)
|
||||||
|
* [`Action.getAction()`](/PierMesh/piermesh/src/branch/main/docs/Services/Action.md#Services.Action.Action.getAction)
|
||||||
|
* [`Action.getData()`](/PierMesh/piermesh/src/branch/main/docs/Services/Action.md#Services.Action.Action.getData)
|
||||||
|
$
|
||||||
|
|
||||||
# System Overview
|
# System Overview
|
||||||
|
|
||||||
|
@ -188,17 +266,17 @@ PierMesh has two main events loops to learn about: the TUI and the service.
|
||||||
|
|
||||||
## TUI
|
## TUI
|
||||||
|
|
||||||
[🔗 Docs](https://git.utopic.work/PierMesh/piermesh/src/branch/main/docs/ui.md)
|
[🔗 Docs](https://git.utopic.work/PierMesh/piermesh/src/branch/main/docs/tlog.md)
|
||||||
|
|
||||||
[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/src/ui.py)
|
[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/src/tlog.py)
|
||||||
|
|
||||||
The TUI is provided via [Textual's](https://textual.textualize.io/) library. It's a relatively simple application that gives us a quick overview of system statistics in the way of memory and cpu usage as well as scrollable logs. You can toggle full screen logs with f and close the TUI and service with q.
|
The TUI is provided via the [curses](https://docs.python.org/3/howto/curses.html) library. It's a relatively simple application that gives us a quick overview of system statistics (memory and CPU usage) as well as logs you can scroll with the directional keys.
|
||||||
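A minimal, self-contained sketch of the curses pattern described here (a stand-in, not the actual PierMesh TUI; the stat values and key bindings are placeholders):

```
import curses

def logUI(stdscr):
    # Stand-in TUI: a stats line on top, scrollable log lines below
    logs = [f"log line {i}" for i in range(100)]
    top = 0
    while True:
        stdscr.erase()
        stdscr.addstr(0, 0, "mem: 42.0 MB  cpu: 3%  (arrows scroll, q quits)")
        height, _ = stdscr.getmaxyx()
        for row, line in enumerate(logs[top:top + height - 3], start=2):
            stdscr.addstr(row, 0, line)
        stdscr.refresh()
        key = stdscr.getch()
        if key == curses.KEY_DOWN:
            top = min(top + 1, len(logs) - 1)
        elif key == curses.KEY_UP:
            top = max(top - 1, 0)
        elif key == ord("q"):
            break

curses.wrapper(logUI)  # wrapper() handles terminal setup/teardown and keypad mode
```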
|
|
||||||
## The Service
|
## The Service
|
||||||
|
|
||||||
PierMesh runs a number of loops under the hood. These are primarily initialized in the main loop of run with a special logging loop outside of that.
|
PierMesh runs a number of loops under the hood. These are primarily initialized in the main loop of run with a special logging loop outside of that.
|
||||||
|
|
||||||
Note that we make heavy use of [Meshtastic's Python API](https://github.com/meshtastic/python).
|
Note that we make use of [Meshtastic's Python API](https://github.com/meshtastic/python).
|
||||||
|
|
||||||
### run
|
### run
|
||||||
|
|
||||||
|
@ -210,18 +288,18 @@ Note that we make heavy use of [Meshtastic's Python API](https://github.com/mesh
|
||||||
|
|
||||||
In run.main we (in order)
|
In run.main we (in order; a minimal sketch follows this list)
|
||||||
1. Initialize the `Node` class to a global `nodeOb`.
|
1. Initialize the `Node` class to a global `nodeOb`.
|
||||||
2. Wait for x seconds determined by command line arguments (see the falin and marcille scripts in scripts)
|
2. Wait for x seconds determined by command line arguments (see the falin script in scripts)
|
||||||
3. Initialize our `Transceiver` class (and the corresponding hardware)
|
3. Initialize our `Transceiver` class (and the corresponding hardware)
|
||||||
4. Initialize our `Server` class
|
4. Initialize our `Server` class
|
||||||
5. Create an async task running the `Node.spongeListen` method which looks for updates from the filter system
|
5. Create an async task running the `Node.spongeListen` method which looks for updates from the filter system
|
||||||
6. Kick the loop on with `await asyncio.sleep(1)` (we do this to kick on all async loops so I will omit this step going forward)
|
6. Kick the loop on with `await asyncio.sleep(1)` (we do this to kick on all async loops so I will omit this step going forward)
|
||||||
7. Create an async task running the `Transceiver.checkProgress` method which checks for packet reception progress and resends packets if necessary
|
7. Create an async task running the `Transceiver.checkProgress` method which checks for packet reception progress and resends packets if necessary (currently not in use)
|
||||||
8. Create an async task running `Node.monitor` which checks and reports system usage
|
8. Create an async task running `Node.monitor` which checks and reports system usage
|
||||||
9. Create an async task running the `Transceiver.announce` method which broadcasts a `Packets.Message` containing network mapping information
|
9. Create an async task running the `Transceiver.announce` method which broadcasts a `Packets.Message` containing network mapping information
|
||||||
10. Last we start the `Microdot` server loop
|
10. Last we start the `Microdot` server loop
|
||||||
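A runnable sketch of the task wiring these steps describe, with stand-in coroutines in place of the real `Node`/`Transceiver` methods (only the ordering and the `create_task` pattern are taken from the list above):

```
import asyncio

async def spongeListen():
    while True:
        await asyncio.sleep(0.5)   # 5. poll the filter system for completed messages

async def checkProgress():
    while True:
        await asyncio.sleep(1)     # 7. check reception progress / resend (currently unused)

async def monitor():
    while True:
        await asyncio.sleep(2)     # 8. check and report system usage

async def announce():
    while True:
        await asyncio.sleep(3)     # 9. broadcast network mapping information

async def main():
    tasks = [asyncio.create_task(spongeListen())]       # 5.
    await asyncio.sleep(1)                               # 6. kick all async loops on
    tasks.append(asyncio.create_task(checkProgress()))   # 7.
    tasks.append(asyncio.create_task(monitor()))         # 8.
    tasks.append(asyncio.create_task(announce()))        # 9.
    await asyncio.sleep(5)  # 10. in PierMesh this slot is the Microdot server loop

asyncio.run(main())
```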
|
|
||||||
### ..
|
### ..
|
||||||
Travelling out of the run.main thread to the primary ____main____ code we see the other two threads: one running the `logPassLoop` loop which is created in the same way as the main thread: `lplThread = threading.Thread(target=asyncio.run, args=(logPassLoop(),))` and one running the TUI loop which has it's own system we just kick on by instantiating the TUI class and calling .run() on.
|
Travelling out of the run.main thread to the primary `__main__` code, we see the other thread running the TUI loop, which has its own system we just kick on by running `.runLogUI`.
|
||||||
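A hedged sketch of that thread split (names borrowed from the surrounding docs; the real `__main__` block may wire things differently):

```
import asyncio
import queue
import threading

tolog = queue.Queue()

async def service():
    # stand-in for run.main: the PierMesh service loops would run here
    for i in range(3):
        tolog.put(f"service tick {i}")
        await asyncio.sleep(0.1)
    tolog.put(None)  # signal the TUI to stop

def runLogUI(tolog, nodeNickname):
    # stand-in for tlog.runLogUI: drain the queue until the service stops
    while (line := tolog.get()) is not None:
        print(f"[{nodeNickname}] {line}")

tuiThread = threading.Thread(target=runLogUI, args=(tolog, "node00"))
tuiThread.start()
asyncio.run(service())
tuiThread.join()
```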
|
|
||||||
## Packet lifecycle
|
## Packet lifecycle
|
||||||
|
|
||||||
|
|
68
docs/run.md
68
docs/run.md
|
@ -8,13 +8,6 @@ Class that handles most of the PierMesh data
|
||||||
|
|
||||||
[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/src/run.py)
|
[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/src/run.py)
|
||||||
|
|
||||||
#### toLog
|
|
||||||
|
|
||||||
We store logs to be processed here
|
|
||||||
|
|
||||||
* **Type:**
|
|
||||||
list
|
|
||||||
|
|
||||||
#### actions
|
#### actions
|
||||||
|
|
||||||
Dictionary that dynamically maps methods with the action prefix to the method name after the prefix, so they can be called through Sponge (Sponge.base) filtering
|
Dictionary that dynamically maps methods with the action prefix to the method name after the prefix, so they can be called through Sponge (Sponge.base) filtering
|
||||||
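A small sketch of how such a dispatch map can be built and used (the `action_` prefix and the dispatch shape are assumptions based on the method names documented below):

```
import asyncio

class Node:
    async def action_map(self, data):
        print("mapping network data:", data)

    async def action_sendCatch(self, data):
        print("sending catch:", data)

node = Node()
actions = {
    name[len("action_"):]: getattr(node, name)
    for name in dir(node)
    if name.startswith("action_")
}

# a filter result such as {"action": "map", "data": {...}} is then dispatched as:
asyncio.run(actions["map"]({"onodeID": "node00"}))
```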
|
@ -71,13 +64,6 @@ LoRa transceiver Transceiver
|
||||||
* **Type:**
|
* **Type:**
|
||||||
[Transceiver](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver)
|
[Transceiver](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver)
|
||||||
|
|
||||||
#### processed
|
|
||||||
|
|
||||||
List of IDs of already completed messages so that we don’t reprocess messages
|
|
||||||
|
|
||||||
* **Type:**
|
|
||||||
list
|
|
||||||
|
|
||||||
#### proc
|
#### proc
|
||||||
|
|
||||||
This process (psutil.Process), used for managing and monitoring PierMesh
|
This process (psutil.Process), used for managing and monitoring PierMesh
|
||||||
|
@ -92,29 +78,37 @@ Dictionary of PierMesh service tasks
|
||||||
* **Type:**
|
* **Type:**
|
||||||
dict
|
dict
|
||||||
|
|
||||||
#### SEE ALSO
|
#### *async* action_addPSK(data)
|
||||||
`logPassLoop`
|
|
||||||
: Loop to handle logging to file and TUI
|
|
||||||
|
|
||||||
#### *async* action_initNodeDH(data: dict)
|
Action to add PSK for specific node, currently unused
|
||||||
|
|
||||||
Initialize diffie hellman key exchange
|
#### *async* action_hop(data)
|
||||||
|
|
||||||
|
Proxy a request to the main internet (in the future cross protocol/link)
|
||||||
|
|
||||||
|
#### *async* action_initCryptography(data: dict)
|
||||||
|
|
||||||
|
Initialize AES-GCM encrypted transport session
|
||||||
|
|
||||||
#### SEE ALSO
|
#### SEE ALSO
|
||||||
`Cryptography.DHEFern.DHEFern`
|
[`Cryptography.WhaleSong.Transport`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.Transport)
|
||||||
: End to end encryption functionality
|
: End to end encryption functionality
|
||||||
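For reference, a standalone sketch of the AES-GCM primitive these transport sessions rely on (pycryptodome; the key below is a placeholder, in PierMesh it comes from the WhaleSong session setup):

```
import msgpack
from Crypto.Cipher import AES

sessionKey = b"\x00" * 32  # placeholder; normally the derived session key

def encrypt(data: dict, key: bytes):
    cipher = AES.new(key, AES.MODE_GCM)
    ciphertext, tag = cipher.encrypt_and_digest(msgpack.dumps(data))
    return ciphertext, cipher.nonce, tag

def decrypt(ciphertext: bytes, nonce: bytes, tag: bytes, key: bytes):
    cipher = AES.new(key, AES.MODE_GCM, nonce=nonce)
    return msgpack.loads(cipher.decrypt_and_verify(ciphertext, tag))

ct, nonce, tag = encrypt({"hello": "mesh"}, sessionKey)
assert decrypt(ct, nonce, tag, sessionKey) == {"hello": "mesh"}
```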
|
|
||||||
#### *async* action_keyDeriveDH(data: dict)
|
|
||||||
|
|
||||||
Derive key via diffie hellman key exchange
|
|
||||||
|
|
||||||
#### *async* action_map(data: dict)
|
#### *async* action_map(data: dict)
|
||||||
|
|
||||||
Map new network data to internal network map
|
Map new network data to internal network map
|
||||||
|
|
||||||
#### SEE ALSO
|
#### SEE ALSO
|
||||||
`Siph.network.Network`
|
`Siph.network.Network`
|
||||||
: Layered graph etwork representation
|
: Layered graph network representation
|
||||||
|
|
||||||
|
#### *async* action_routeCatch(data: dict)
|
||||||
|
|
||||||
|
Route received catch to peer who requested it
|
||||||
|
|
||||||
|
#### *async* action_routeHop(data: dict)
|
||||||
|
|
||||||
|
Return proxy request results to requester
|
||||||
|
|
||||||
#### *async* action_sendCatch(data: dict)
|
#### *async* action_sendCatch(data: dict)
|
||||||
|
|
||||||
|
@ -131,21 +125,27 @@ Send data to a peer connected to the server
|
||||||
`Sponge.Protocols`
|
`Sponge.Protocols`
|
||||||
: Protocol based packet filtering
|
: Protocol based packet filtering
|
||||||
|
|
||||||
`webui.serve.Server`
|
[`Splash.serve.Server`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server)
|
||||||
: Runs a light Microdot web server with http/s and websocket functionality
|
: Runs a light Microdot web server with http/s and websocket functionality
|
||||||
|
|
||||||
`webui.serve.Server.sendToPeer`
|
[`Splash.serve.Server.sendToPeer`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.sendToPeer)
|
||||||
: Function to actually execute the action
|
: Function to actually execute the action
|
||||||
|
|
||||||
#### cLog(priority: int, message: str)
|
#### *async* action_syncIndex(data: dict)
|
||||||
|
|
||||||
Convenience function that logs to the ui and log files
|
Add received index entries to Catch from a remote Catch index
|
||||||
|
|
||||||
* **Parameters:**
|
#### *async* fsInit()
|
||||||
* **priority** (*int*) – Priority of message to be passed to logging
|
|
||||||
* **message** (*str*) – Message to log
|
Initialize the file system for use
|
||||||
* **Return type:**
|
|
||||||
None
|
#### *async* main()
|
||||||
|
|
||||||
|
Main loop, sets up the message listening, system monitoring and server running loops
|
||||||
|
|
||||||
|
#### *async* monitor()
|
||||||
|
|
||||||
|
Monitor and log ram and cpu usage
|
||||||
|
|
||||||
#### *async* spongeListen()
|
#### *async* spongeListen()
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,3 @@
|
||||||
|
<a id="soil-daisy-signal-management"></a>
|
||||||
|
|
||||||
|
# Soil: Daisy signal management
|
|
@ -0,0 +1,3 @@
|
||||||
|
<a id="ui-tui-application"></a>
|
||||||
|
|
||||||
|
# ui: TUI application
|
|
@ -0,0 +1,35 @@
|
||||||
|
<a id="module-tlog"></a>
|
||||||
|
|
||||||
|
<a id="tlog"></a>
|
||||||
|
|
||||||
|
# tlog
|
||||||
|
|
||||||
|
### *class* tlog.VHandler(level, tolog)
|
||||||
|
|
||||||
|
Custom log handler to push logs into a thread-safe queue so the TUI can read them
|
||||||
|
|
||||||
|
[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/src/tlog.py)
|
||||||
|
|
||||||
|
#### tolog
|
||||||
|
|
||||||
|
Thread-safe log queue
|
||||||
|
|
||||||
|
* **Type:**
|
||||||
|
queue.Queue
|
||||||
|
|
||||||
|
#### emit(record)
|
||||||
|
|
||||||
|
Push the given logging record onto the `tolog` queue so the TUI loop can read and display it.
|
||||||
|
|
||||||
|
This overrides `logging.Handler.emit`; rather than raising NotImplementedError,
|
||||||
|
it hands each record to the TUI through the queue.
|
||||||
|
|
||||||
|
#### tolog *= <queue.Queue object>*
|
||||||
|
|
||||||
|
### tlog.logUI(stdscr, tolog, nodeNickname)
|
||||||
|
|
||||||
|
TUI loop
|
||||||
|
|
||||||
|
### tlog.runLogUI(tolog, nodeNickname)
|
||||||
|
|
||||||
|
Entry point wrapper that kicks off the TUI loop (some required kludge)
|
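The pattern `VHandler` documents here, as a self-contained sketch (not the PierMesh class itself):

```
import logging
import queue

tolog = queue.Queue()

class QueueLogHandler(logging.Handler):
    """Push formatted records onto a queue so another thread (e.g. a TUI) can drain them."""

    def __init__(self, level, tolog):
        super().__init__(level)
        self.tolog = tolog

    def emit(self, record):
        self.tolog.put(self.format(record))

logger = logging.getLogger("demo")
logger.setLevel(logging.DEBUG)
logger.addHandler(QueueLogHandler(logging.DEBUG, tolog))
logger.info("hello from the service thread")
print(tolog.get_nowait())  # the TUI side would read from the queue instead
```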
74
docs/ui.md
74
docs/ui.md
|
@ -1,74 +0,0 @@
|
||||||
<a id="ui-tui-application"></a>
|
|
||||||
|
|
||||||
# ui: TUI application
|
|
||||||
|
|
||||||
### *class* ui.TUI(driver_class: Type[Driver] | None = None, css_path: str | PurePath | List[str | PurePath] | None = None, watch_css: bool = False)
|
|
||||||
|
|
||||||
TUI for PierMesh
|
|
||||||
|
|
||||||
[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/src/ui.py)
|
|
||||||
|
|
||||||
#### visibleLogo
|
|
||||||
|
|
||||||
Whether the logo is visible or not, used in toggling visibility
|
|
||||||
|
|
||||||
* **Type:**
|
|
||||||
bool
|
|
||||||
|
|
||||||
#### nodeOb
|
|
||||||
|
|
||||||
Reference to the Node running the PierMesh service
|
|
||||||
|
|
||||||
* **Type:**
|
|
||||||
[Node](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node)
|
|
||||||
|
|
||||||
#### done
|
|
||||||
|
|
||||||
Whether the TUI has been killed
|
|
||||||
|
|
||||||
* **Type:**
|
|
||||||
bool
|
|
||||||
|
|
||||||
#### CSS_PATH *: ClassVar[CSSPathType | None]* *= 'ui.tcss'*
|
|
||||||
|
|
||||||
File paths to load CSS from.
|
|
||||||
|
|
||||||
#### action_quitFull()
|
|
||||||
|
|
||||||
Kill the whole stack by setting self to done and terminating the thread. We check in run.monitor later and kill the rest of the stack then with psutil
|
|
||||||
|
|
||||||
#### SEE ALSO
|
|
||||||
`run.monitor`
|
|
||||||
|
|
||||||
#### action_toggleFullscreen()
|
|
||||||
|
|
||||||
Toggle fullscreen logs by either collapsing width or setting it to it’s original size
|
|
||||||
|
|
||||||
#### compose()
|
|
||||||
|
|
||||||
Build the TUI
|
|
||||||
|
|
||||||
#### do_set_cpu_percent(percent: float)
|
|
||||||
|
|
||||||
Set CPU percent in the label and progress bar
|
|
||||||
|
|
||||||
* **Parameters:**
|
|
||||||
**percent** (*float*) – Percent of the cpu PierMesh is using
|
|
||||||
|
|
||||||
#### do_set_mem(memmb: float)
|
|
||||||
|
|
||||||
Set memory usage label in the ui
|
|
||||||
|
|
||||||
* **Parameters:**
|
|
||||||
**memmb** (*float*) – Memory usage of PierMesh in megabytes
|
|
||||||
|
|
||||||
#### do_write_line(logLine: str)
|
|
||||||
|
|
||||||
Write line to the logs panel
|
|
||||||
|
|
||||||
* **Parameters:**
|
|
||||||
**logLine** (*str*) – Line to log
|
|
||||||
|
|
||||||
#### on_mount()
|
|
||||||
|
|
||||||
Called at set up, configures the title and the progess bar
|
|
|
@ -1,3 +1,5 @@
|
||||||
home = /usr/bin
|
home = /usr/bin
|
||||||
include-system-site-packages = false
|
include-system-site-packages = false
|
||||||
version = 3.10.12
|
version = 3.11.2
|
||||||
|
executable = /usr/bin/python3.11
|
||||||
|
command = /usr/bin/python3 -m venv /home/ag/Documents/piermesh
|
||||||
|
|
30
readme.md
30
readme.md
|
@ -2,8 +2,6 @@
|
||||||
# PierMesh
|
# PierMesh
|
||||||
## A new internet, a fresh start
|
## A new internet, a fresh start
|
||||||
|
|
||||||
This is the monorepo for PierMesh.
|
|
||||||
|
|
||||||
# Docs
|
# Docs
|
||||||
|
|
||||||
You can find the full docs here: [docs/](https://git.utopic.work/PierMesh/piermesh/src/branch/main/docs)
|
You can find the full docs here: [docs/](https://git.utopic.work/PierMesh/piermesh/src/branch/main/docs)
|
||||||
|
@ -15,25 +13,43 @@ Note: these instructions will probably only work on Linux at the moment
|
||||||
Note: check the scripts to make sure they'll work with your system, and in general I recommend checking scripts before you run them
|
Note: check the scripts to make sure they'll work with your system, and in general I recommend checking scripts before you run them
|
||||||
|
|
||||||
Follow Meshtastic's guide on setting up your device: [https://meshtastic.org/docs/getting-started/](https://meshtastic.org/docs/getting-started/)
|
Follow Meshtastic's guide on setting up your device: [https://meshtastic.org/docs/getting-started/](https://meshtastic.org/docs/getting-started/)
|
||||||
|
|
||||||
Make sure you have the latest Python installed
|
Make sure you have the latest Python installed
|
||||||
|
|
||||||
|
Make sure you have pip and venv installed
|
||||||
|
|
||||||
```
|
```
|
||||||
git clone https://git.utopic.work/PierMesh/piermesh
|
git clone https://git.utopic.work/PierMesh/piermesh
|
||||||
|
|
||||||
cd piermesh
|
cd piermesh
|
||||||
|
|
||||||
python -m venv .
|
python -m venv .venv
|
||||||
|
|
||||||
source bin/activate
|
source .venv/bin/activate
|
||||||
|
|
||||||
pip install -r requirements.txt
|
pip install -r requirements.txt
|
||||||
|
|
||||||
cd src
|
cd src
|
||||||
|
|
||||||
chmod a+x ./scripts/falin
|
cp .piermesh.example .piermesh
|
||||||
|
|
||||||
./scripts/falin
|
Set the TransceiverPort (you should have this from setting up your Meshtastic device)
|
||||||
|
|
||||||
|
Set the PSK; this should match the PSK of the nodes you want to connect with
|
||||||
|
|
||||||
|
python run.py
|
||||||
```
|
```
|
||||||
|
You should now be able to access the web interface at port 5000 of the machine you set up on (likely localhost:5000)
|
||||||
|
|
||||||
|
Make sure to click connect at the top of the page to initialize your websocket connection; after this you can use the utilities provided:
|
||||||
|
|
||||||
|
Hopper: proxy requests to the main internet
|
||||||
|
|
||||||
|
Catch: browse for a Catch/website on PierMesh
|
||||||
|
|
||||||
|
Catch Editor: create and publish a Catch onto PierMesh
|
||||||
|
|
||||||
|
Bubble: send peer to peer messages (and art using the pixel art tool) to other nodes
|
||||||
|
|
||||||
# License text
|
# License text
|
||||||
|
|
||||||
|
@ -42,5 +58,5 @@ This program is free software: you can redistribute it and/or modify it under th
|
||||||
|
|
||||||
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
|
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
|
||||||
|
|
||||||
You should have received a copy of the GNU General Public License along with this program. If not, see [https://www.gnu.org/licenses/](https://www.gnu.org/licenses/).
|
You should have received a copy of the GNU General Public License along with this program. If not, see https://www.gnu.org/licenses/.
|
||||||
```
|
```
|
||||||
|
|
|
@ -13,3 +13,7 @@ sphinx
|
||||||
textual
|
textual
|
||||||
textual-dev
|
textual-dev
|
||||||
sphinx-markdown-builder==0.6.6
|
sphinx-markdown-builder==0.6.6
|
||||||
|
pycryptodome
|
||||||
|
pyjwt
|
||||||
|
uvloop
|
||||||
|
python-lsp-server[all]
|
||||||
|
|
|
@ -0,0 +1,15 @@
|
||||||
|
# DONT TOUCH THIS SECTION UNLESS YOU KNOW WHAT YOURE DOING
|
||||||
|
[DEFAULT]
|
||||||
|
Nickname = node1
|
||||||
|
StartupDelay = 0
|
||||||
|
WebUIPort = 5000
|
||||||
|
ShowTUI = True
|
||||||
|
|
||||||
|
[OPERATOR_REQUIRED]
|
||||||
|
TransceiverPort = /dev/ttyACM0
|
||||||
|
PSK = jgf765!FS0+6
|
||||||
|
|
||||||
|
# DO YOUR NON REQUIRED SETTINGS HERE
|
||||||
|
[OPERATOR_OVERRIDES]
|
||||||
|
|
||||||
|
ShowTUI = False
|
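A hedged sketch of reading this file with the standard library's configparser (the actual loading code is not part of this diff; section and option names are taken from the example above):

```
import configparser

config = configparser.ConfigParser()
config.read(".piermesh")

# values in [DEFAULT] act as fallbacks for every other section
port = config["OPERATOR_REQUIRED"]["TransceiverPort"]
showTUI = config["OPERATOR_OVERRIDES"].getboolean("ShowTUI", fallback=True)
delay = config["DEFAULT"].getint("StartupDelay")
print(port, showTUI, delay)
```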
|
@ -0,0 +1,13 @@
|
||||||
|
# DONT TOUCH THIS SECTION UNLESS YOU KNOW WHAT YOURE DOING
|
||||||
|
[DEFAULT]
|
||||||
|
Nickname = node00
|
||||||
|
StartupDelay = 0
|
||||||
|
WebUIPort = 5000
|
||||||
|
ShowTUI = True
|
||||||
|
|
||||||
|
[OPERATOR_REQUIRED]
|
||||||
|
# TransceiverPort = /dev/ttyACM0
|
||||||
|
|
||||||
|
# DO YOUR SETTINGS HERE
|
||||||
|
[OPERATOR_OVERRIDES]
|
||||||
|
|
|
@ -1,10 +1,41 @@
|
||||||
|
# NOTE: Used for requesting web pages
|
||||||
import requests
|
import requests
|
||||||
import msgpack
|
|
||||||
import lzma
|
# NOTE: Used for parsing web pages
|
||||||
from Packets.Message import Message
|
from bs4 import BeautifulSoup
|
||||||
|
|
||||||
|
# NOTE: Generic imports
|
||||||
|
import base64
|
||||||
|
import mimetypes
|
||||||
|
import logging
|
||||||
|
|
||||||
|
logger = logging.getLogger("__main__." + __name__)
|
||||||
|
|
||||||
|
|
||||||
def get(url: str, params=None):
|
def downloadFile(url, text=True, mimeType=None):
|
||||||
|
"""
|
||||||
|
Download resource from url and convert it to text or a data url
|
||||||
|
"""
|
||||||
|
fbytes = b""
|
||||||
|
with requests.get(url, stream=True) as r:
|
||||||
|
r.raise_for_status()
|
||||||
|
for chunk in r.iter_content(chunk_size=8192):
|
||||||
|
fbytes += chunk
|
||||||
|
if text:
|
||||||
|
return fbytes.decode("utf-8")
|
||||||
|
else:
|
||||||
|
if mimeType is None:
|
||||||
|
mimeType, encoding = mimetypes.guess_type(url)
|
||||||
|
if mimeType is None:
|
||||||
|
raise ValueError(
|
||||||
|
"Couldnt guess mime type and none was supplied, cant encode to data url"
|
||||||
|
)
|
||||||
|
b64str = base64.b64encode(fbytes).decode("utf-8")
|
||||||
|
dataUrl = "data:{0};base64,{1}".format(mimeType, b64str)
|
||||||
|
return dataUrl
|
||||||
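A usage sketch for the function above (the URLs are hypothetical, shown only to illustrate the two return modes):

```
page = downloadFile("https://example.com/index.html")            # text=True -> decoded str
logo = downloadFile("https://example.com/logo.png", text=False)  # -> "data:image/png;base64,..."
```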
|
|
||||||
|
|
||||||
|
def get(url: str, params=None, followTags=None):
|
||||||
"""
|
"""
|
||||||
http/s get request
|
http/s get request
|
||||||
|
|
||||||
|
@ -14,10 +45,44 @@ def get(url: str, params=None):
|
||||||
|
|
||||||
params
|
params
|
||||||
Requests (library) parameters
|
Requests (library) parameters
|
||||||
|
|
||||||
|
followTags
|
||||||
|
None or list of tags to download the src/href from
|
||||||
"""
|
"""
|
||||||
|
logger.debug("Hopping to it")
|
||||||
|
# TODO: Non blocking requests
|
||||||
|
# WARN: Do not run self requests until this is fixed
|
||||||
r = requests.get(url, params=params)
|
r = requests.get(url, params=params)
|
||||||
r = {"response": r.text, "code": r.status_code}
|
logger.debug("Content retrieved, parsing")
|
||||||
return Message(lzma.compress(msgpack.dumps(r))).get()
|
r = {
|
||||||
|
"response": r.text,
|
||||||
|
"code": r.status_code,
|
||||||
|
"content-type": r.headers.get("content-type"),
|
||||||
|
}
|
||||||
|
logger.debug("Done parsing")
|
||||||
|
|
||||||
|
# TODO: Reject followTags if content type is other than html
|
||||||
|
if followTags is not None:
|
||||||
|
soup = BeautifulSoup(r["response"], "html.parser")
|
||||||
|
# TODO: Checking for relative links
|
||||||
|
for tag in followTags:
|
||||||
|
if tag in ["img", "video"]:
|
||||||
|
for elem in soup.find_all(tag):
|
||||||
|
elem["src"] = downloadFile(elem["src"], text=False)
|
||||||
|
elif tag in ["link"]:
|
||||||
|
for elem in soup.find_all(tag):
|
||||||
|
if elem["rel"] == "stylesheet":
|
||||||
|
style = downloadFile(elem["href"])
|
||||||
|
elem.decompose()
|
||||||
|
styleTag = soup.new_tag("style")
styleTag.string = style
soup.head.append(styleTag)
|
||||||
|
elif tag == "script":
|
||||||
|
for elem in soup.find_all(tag):
|
||||||
|
script = downloadFile(elem["src"])
|
||||||
|
elem["src"] = ""
|
||||||
|
elem.string = script
|
||||||
|
r["response"] = soup.text
|
||||||
|
logger.debug("Done hopping")
|
||||||
|
return r
|
||||||
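A usage sketch for get() with followTags (hypothetical URL; matched img/script/link resources are inlined before the response is returned):

```
r = get("https://example.com", followTags=["img", "script", "link"])
print(r["code"], r["content-type"])
html = r["response"]  # markup with the matched resources inlined
```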
|
|
||||||
|
|
||||||
def post(url: str, params=None):
|
def post(url: str, params=None):
|
||||||
|
@ -33,4 +98,4 @@ def post(url: str, params=None):
|
||||||
"""
|
"""
|
||||||
r = requests.post(url, data=params)
|
r = requests.post(url, data=params)
|
||||||
r = {"response": r.text, "code": r.status_code}
|
r = {"response": r.text, "code": r.status_code}
|
||||||
return Message(lzma.compress(msgpack.dumps(r))).get()
|
return r
|
||||||
|
|
|
@ -0,0 +1,7 @@
|
||||||
|
|
||||||
|
|
||||||
|
def byFile():
|
||||||
|
pass
|
||||||
|
|
||||||
|
def byPrompt():
|
||||||
|
pass
|
|
@ -0,0 +1,39 @@
|
||||||
|
|
||||||
|
class Context:
|
||||||
|
"""
|
||||||
|
Generic context data structure, currently subclassed for use in filters; see Sponge/Protocols/Yellow.py
|
||||||
|
|
||||||
|
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Config/Context.py>`__
|
||||||
|
|
||||||
|
Attributes
|
||||||
|
----------
|
||||||
|
ctx: dict
|
||||||
|
Dictionary of context values
|
||||||
|
|
||||||
|
"""
|
||||||
|
def __init__(self, subsets: dict={}, **kwargs):
|
||||||
|
"""
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
subsets: dict
|
||||||
|
Keys mapped to lists of keys for grouping values (unused currently)
|
||||||
|
kwargs: kwargs
|
||||||
|
Keyword arguments to map into the context as key/value pairs
|
||||||
|
"""
|
||||||
|
self.ctx = {}
|
||||||
|
self.subsets = subsets
|
||||||
|
for key, val in kwargs.items():
|
||||||
|
self.ctx[key] = {
|
||||||
|
"val": val,
|
||||||
|
"type": type(val)
|
||||||
|
}
|
||||||
|
|
||||||
|
def __getitem__(self, idx):
|
||||||
|
return self.ctx[idx]
|
||||||
|
|
||||||
|
def getSubset(self, subset):
|
||||||
|
if subset in self.subsets.keys():
|
||||||
|
res = {}
|
||||||
|
for key in self.subsets[subset]:
|
||||||
|
res[key] = self.ctx[key]["val"]
|
||||||
|
return res
|
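A usage sketch for the class above:

```
ctx = Context(subsets={"routing": ["onodeID"]}, onodeID="node00", hops=4)
print(ctx["hops"]["val"])        # 4
print(ctx["hops"]["type"])       # <class 'int'>
print(ctx.getSubset("routing"))  # {'onodeID': 'node00'}
```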
|
@ -0,0 +1,5 @@
|
||||||
|
Context
|
||||||
|
=======
|
||||||
|
|
||||||
|
.. autoclass:: Config.Context.Context
|
||||||
|
:members:
|
|
@ -1,35 +1,33 @@
|
||||||
import base64
|
# NOTE: Generic imports
|
||||||
import os
|
import os
|
||||||
from cryptography.fernet import Fernet
|
import lzma
|
||||||
from cryptography.hazmat.primitives import hashes
|
import logging
|
||||||
from cryptography.hazmat.primitives.asymmetric import dh
|
import traceback
|
||||||
from cryptography.hazmat.primitives.kdf.hkdf import HKDF
|
|
||||||
from cryptography.hazmat.primitives.serialization import (
|
# NOTE: Import for handling message data
|
||||||
Encoding,
|
|
||||||
NoEncryption,
|
|
||||||
ParameterFormat,
|
|
||||||
PublicFormat,
|
|
||||||
PrivateFormat,
|
|
||||||
)
|
|
||||||
import cryptography.hazmat.primitives.serialization as Serialization
|
|
||||||
import msgpack
|
import msgpack
|
||||||
|
|
||||||
|
# NOTE: Cryptography imports
|
||||||
|
from Crypto.PublicKey import ECC
|
||||||
|
from Crypto.Hash import SHAKE128
|
||||||
|
from Crypto.Protocol.DH import key_agreement
|
||||||
|
from Crypto.Cipher import AES
|
||||||
|
|
||||||
|
# NOTE: Daisy database import
|
||||||
from Daisy.Store import Store
|
from Daisy.Store import Store
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger("__main__." + __name__)
|
||||||
|
|
||||||
# TODO: Different store directories per node
|
# TODO: Different store directories per node
|
||||||
|
|
||||||
|
class Transport:
|
||||||
class DHEFern:
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Cryptography/WhaleSong.py>`__
|
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Cryptography/WhaleSong.py>`__
|
||||||
|
|
||||||
Attributes
|
Attributes
|
||||||
----------
|
----------
|
||||||
cLog
|
|
||||||
Method reference to `run.Node.cLog` so we can log to the ui from here
|
|
||||||
|
|
||||||
loadedParams: dict
|
|
||||||
In memory representations of cryptography parameters
|
|
||||||
|
|
||||||
loadedKeys: dict
|
loadedKeys: dict
|
||||||
In memory representations of cryptography keys
|
In memory representations of cryptography keys
|
||||||
|
@ -37,7 +35,7 @@ class DHEFern:
|
||||||
nodeNickname: str
|
nodeNickname: str
|
||||||
Name of node for isolating configs when running multiple nodes
|
Name of node for isolating configs when running multiple nodes
|
||||||
|
|
||||||
cache: Components.daisy.Cache
|
cache: Daisy.Cache.Cache
|
||||||
Daisy cache for use in storing cryptography information
|
Daisy cache for use in storing cryptography information
|
||||||
|
|
||||||
publicKey
|
publicKey
|
||||||
|
@ -45,93 +43,71 @@ class DHEFern:
|
||||||
|
|
||||||
privateKey
|
privateKey
|
||||||
Private key for node
|
Private key for node
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, cache, nodeNickname, cLog):
|
daisyCryptography: Daisy.CryptographyUtil.SteelPetal
|
||||||
|
Record cryptography reference
|
||||||
|
"""
|
||||||
|
def __init__(self, cache, nodeNickname, daisyCryptography, psk):
|
||||||
"""
|
"""
|
||||||
Parameters
|
Parameters
|
||||||
----------
|
----------
|
||||||
cache: Components.daisy.Cache
|
cache: Daisy.Cache.Cache
|
||||||
Reference to the node instances Daisy cache
|
Reference to the node instances Daisy cache
|
||||||
|
|
||||||
nodeNickname: str
|
nodeNickname: str
|
||||||
Node nickname for record storage
|
Node nickname for record storage
|
||||||
|
|
||||||
cLog
|
daisyCryptography: Daisy.CryptographyUtil.SteelPetal
|
||||||
Reference to `run.Node.cLog`
|
Record cryptography reference
|
||||||
|
|
||||||
|
psk: str
|
||||||
|
Plaintext pre shared key
|
||||||
"""
|
"""
|
||||||
self.cLog = cLog
|
|
||||||
self.stores = {}
|
self.stores = {}
|
||||||
self.loadedParams = {}
|
|
||||||
self.loadedKeys = {}
|
|
||||||
self.nodeNickname = nodeNickname
|
self.nodeNickname = nodeNickname
|
||||||
self.cache = cache
|
self.cache = cache
|
||||||
if os.path.exists("daisy/cryptography/{0}/param".format(nodeNickname)) == False:
|
self.daisyCryptography = daisyCryptography
|
||||||
self.initStore("param")
|
if not os.path.exists("{0}/daisy/cryptography/key".format(nodeNickname)):
|
||||||
else:
|
logger.info("Key store DNE, initializing")
|
||||||
self.stores["param"] = Store("param", "cryptography", nodeNickname)
|
|
||||||
self.params = self.loadParamBytes(self.stores["param"].get()["self"])
|
|
||||||
self.cLog(20, "Param store initialized")
|
|
||||||
if os.path.exists("daisy/cryptography/{0}/key".format(nodeNickname)) == False:
|
|
||||||
self.cLog(20, "Key store DNE, initializing")
|
|
||||||
self.initStore("key")
|
self.initStore("key")
|
||||||
self.genKeyPair()
|
|
||||||
else:
|
else:
|
||||||
self.cLog(20, "Key store exists, loading")
|
logger.info("Key store exists, loading")
|
||||||
self.stores["key"] = Store("key", "cryptography", nodeNickname)
|
self.stores["key"] = Store(
|
||||||
self.cLog(20, "Store loaded")
|
"key", "cryptography", nodeNickname, daisyCryptography
|
||||||
# tks = self.stores["key"].get()
|
|
||||||
# self.publicKey = tks["self"]["publicKey"]
|
|
||||||
# self.privateKey = tks["self"]["privateKey"]
|
|
||||||
self.cLog(20, "Key store initialized")
|
|
||||||
|
|
||||||
def checkInMem(self, store: str, nodeID: str):
|
|
||||||
"""
|
|
||||||
Check if parameters or keys are loaded for node of nodeID
|
|
||||||
|
|
||||||
Parameters
|
|
||||||
----------
|
|
||||||
store: str
|
|
||||||
Whether to check loaded keys or parameters
|
|
||||||
|
|
||||||
"""
|
|
||||||
if store == "param":
|
|
||||||
return nodeID in self.loadedParams.keys()
|
|
||||||
elif store == "key":
|
|
||||||
return nodeID in self.loadedKeys.keys()
|
|
||||||
|
|
||||||
def loadRecordToMem(self, store: str, nodeID: str):
|
|
||||||
"""
|
|
||||||
Load record of nodeID from store to either keys or pameters
|
|
||||||
"""
|
|
||||||
r = self.getRecord(store, nodeID)
|
|
||||||
if r == False:
|
|
||||||
self.cLog(
|
|
||||||
30, "Tried to load nonexistent {0} for node {1}".format(store, nodeID)
|
|
||||||
)
|
)
|
||||||
return False
|
logger.info("Key store initialized")
|
||||||
elif self.checkInMem(store, nodeID):
|
srecord = self.getRecord("key", "self")
|
||||||
self.cLog(10, "{0}s already deserialized, skipping".format(store))
|
if srecord == False:
|
||||||
|
self.stores["key"].createEmpty("self")
|
||||||
|
# TODO: Note that this happens in the docs
|
||||||
|
self.stores["key"].update(
|
||||||
|
"self",
|
||||||
|
{"PSK": self.daisyCryptography.pad(psk).encode("utf-8")},
|
||||||
|
write=False
|
||||||
|
)
|
||||||
|
if "publicKey" not in self.getRecord("key", "self").keys():
|
||||||
|
self.addPublickey(None, None, forSelf=True)
|
||||||
else:
|
else:
|
||||||
if store == "param":
|
self.stores["key"].update("self", {
|
||||||
self.loadedParams[nodeID] = self.loadParamBytes(r)
|
"publicKey": ECC.import_key(
|
||||||
elif store == "key":
|
self.getRecord("key", "self")["publicKey"]
|
||||||
self.loadedKeys[nodeID] = {
|
)
|
||||||
"publicKey": Serialization.load_pem_public_key(r["publicKey"]),
|
}, write=False)
|
||||||
"privateKey": Serialization.load_pem_private_key(
|
|
||||||
r["privateKey"], None
|
|
||||||
),
|
|
||||||
}
|
|
||||||
return True
|
|
||||||
|
|
||||||
def getRecord(self, store: str, key: str):
|
def kdf(self, bytesX):
|
||||||
|
"""
|
||||||
|
Key derivation function
|
||||||
|
"""
|
||||||
|
# TODO: Better explanation
|
||||||
|
return SHAKE128.new(bytesX).read(32)
|
||||||
|
|
||||||
|
def getRecord(self, store: str, key: str, ephemeral=False):
|
||||||
"""
|
"""
|
||||||
Get record from store: store with key: key
|
Get record from store: store with key: key
|
||||||
"""
|
"""
|
||||||
r = stores[store].getRecord(key)
|
r = self.stores[store].getRecord(key, ephemeral=ephemeral)
|
||||||
if r == False:
|
if r == False:
|
||||||
self.cLog(20, "Record does not exist")
|
logger.log(20, "Record does not exist")
|
||||||
return False
|
return False
|
||||||
else:
|
else:
|
||||||
return r
|
return r
|
||||||
|
@ -140,157 +116,231 @@ class DHEFern:
|
||||||
"""
|
"""
|
||||||
Initialize store: store
|
Initialize store: store
|
||||||
"""
|
"""
|
||||||
self.stores[store] = Store(store, "cryptography", self.nodeNickname)
|
self.stores[store] = Store(
|
||||||
if store == "param":
|
store, "cryptography", self.nodeNickname, self.daisyCryptography
|
||||||
self.genParams()
|
)
|
||||||
self.stores[store].update("self", self.getParamsBytes(), recur=False)
|
if store == "key":
|
||||||
elif store == "key":
|
|
||||||
self.stores[store].update("self", {}, recur=False)
|
self.stores[store].update("self", {}, recur=False)
|
||||||
else:
|
else:
|
||||||
self.cLog(30, "Store not defined")
|
logger.warning("Store not defined")
|
||||||
|
|
||||||
def genParams(self):
|
def genStaticKey(self, onodeID):
|
||||||
"""
|
"""
|
||||||
Generate Diffie Hellman parameters
|
Generate static key for session encryption with given node
|
||||||
"""
|
"""
|
||||||
params = dh.generate_parameters(generator=2, key_size=2048)
|
staticKey = ECC.generate(curve="p256")
|
||||||
self.params = params
|
|
||||||
return params
|
|
||||||
|
|
||||||
def getParamsBytes(self):
|
|
||||||
"""
|
|
||||||
Get bytes encoded from self.parameters (TODO: Encode from store)
|
|
||||||
"""
|
|
||||||
return self.params.parameter_bytes(Encoding.PEM, ParameterFormat.PKCS3)
|
|
||||||
|
|
||||||
def loadParamBytes(self, pemBytes: bytes):
|
|
||||||
"""
|
|
||||||
Load parameters to self.params from given bytes (TODO: Load from store)
|
|
||||||
"""
|
|
||||||
self.params = Serialization.load_pem_parameters(pemBytes)
|
|
||||||
return self.params
|
|
||||||
|
|
||||||
def genKeyPair(self, paramsOverride=False, setSelf: bool = True):
|
|
||||||
"""
|
|
||||||
Generate public and private keys from self.params (TODO: Gen from passed params)
|
|
||||||
|
|
||||||
paramsOverride
|
|
||||||
False or parameters to use (TODO)
|
|
||||||
|
|
||||||
setSelf: bool
|
|
||||||
Whether to set self.privateKey and self.publicKey
|
|
||||||
"""
|
|
||||||
privateKey = self.params.generate_private_key()
|
|
||||||
if setSelf:
|
|
||||||
self.privateKey = privateKey
|
|
||||||
publicKey = privateKey.public_key()
|
|
||||||
if setSelf:
|
|
||||||
self.publicKey = publicKey
|
|
||||||
self.stores["key"].update(
|
self.stores["key"].update(
|
||||||
"self",
|
onodeID,
|
||||||
{
|
{
|
||||||
"publicKey": self.publicKey.public_bytes(
|
"staticKey": staticKey.export_key(
|
||||||
Encoding.PEM, PublicFormat.SubjectPublicKeyInfo
|
format="PEM", prot_params={"iteration_count": 131072}
|
||||||
),
|
)
|
||||||
"privateKey": self.privateKey.private_bytes(
|
|
||||||
Encoding.PEM, PrivateFormat.PKCS8, NoEncryption()
|
|
||||||
),
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
return [privateKey, publicKey]
|
self.stores["key"].update(onodeID, {"staticKey": staticKey}, write=False)
|
||||||
|
|
||||||
|
def genOurEphemeralKey(self, onodeID):
|
||||||
|
"""
|
||||||
|
Generate ephemeral key for session encryption with the given node
|
||||||
|
"""
|
||||||
|
ourEphemeralKey = ECC.generate(curve="p256")
|
||||||
|
self.stores["key"].update(onodeID, {"ourEphemeralKey": ourEphemeralKey}, write=False)
|
||||||
|
|
||||||
|
def addPublickey(self, onodeID, publicKey, forSelf: bool = False):
|
||||||
|
"""
|
||||||
|
Add a public key for a given node including this one
|
||||||
|
|
||||||
|
onodeID
|
||||||
|
Node identifier
|
||||||
|
|
||||||
|
publicKey
|
||||||
|
Public key to add
|
||||||
|
|
||||||
|
forSelf: bool
|
||||||
|
Whether to add key for this node
|
||||||
|
"""
|
||||||
|
if forSelf:
|
||||||
|
publicKey = ECC.generate(curve="p256")
|
||||||
|
self.stores["key"].update("self", {
|
||||||
|
"publicKey": publicKey.export_key(
|
||||||
|
format="PEM",
|
||||||
|
prot_params={"iteration_count": 131072}
|
||||||
|
)})
|
||||||
|
self.stores["key"].update("self", {
|
||||||
|
"publicKey": publicKey
|
||||||
|
},
|
||||||
|
write=False)
|
||||||
else:
|
else:
|
||||||
publicKey = publicKey.public_bytes(
|
logger.info("Importing keys")
|
||||||
Encoding.PEM, PublicFormat.SubjectPublicKeyInfo
|
record = self.getRecord("key", onodeID)
|
||||||
|
if record == False:
|
||||||
|
self.stores["key"].createEmpty(onodeID)
|
||||||
|
self.stores["key"].update(onodeID, {"publicKey": publicKey})
|
||||||
|
self.stores["key"].update(onodeID, {"publicKey": ECC.import_key(publicKey)}, write=False)
|
||||||
|
|
||||||
|
def addPeerEphemeralKey(self, onodeID, peerEphemeralKey: bytes):
|
||||||
|
"""
|
||||||
|
Add a peer node's ephemeral key for session encryption
|
||||||
|
|
||||||
|
onodeID
|
||||||
|
Node identifier
|
||||||
|
|
||||||
|
peerEphemeralKey: bytes
|
||||||
|
Serialized ephemeral key
|
||||||
|
"""
|
||||||
|
self.stores["key"].update(onodeID, {"peerEphemeralKey": ECC.import_key(peerEphemeralKey)}, write=False)
|
||||||
|
|
||||||
|
def sessionSetup(self, onodeID, peerEphemeralKey: bytes):
|
||||||
|
"""
|
||||||
|
Set up transport encryption session
|
||||||
|
|
||||||
|
onodeID
|
||||||
|
Node identifier
|
||||||
|
|
||||||
|
peerEphemeralKey: bytes
|
||||||
|
Serialized ephemeral key
|
||||||
|
"""
|
||||||
|
if not self.getRecord("key", onodeID):
|
||||||
|
logger.warning("No record, waiting for announce")
|
||||||
|
else:
|
||||||
|
self.addPeerEphemeralKey(onodeID, peerEphemeralKey)
|
||||||
|
self.generateSessionKey(onodeID)
|
||||||
|
|
||||||
|
def generateSessionKey(self, onodeID):
|
||||||
|
"""
|
||||||
|
Generate session key for transport encryption
|
||||||
|
|
||||||
|
onodeID
|
||||||
|
Node identifier
|
||||||
|
"""
|
||||||
|
keysOb = self.getRecord("key", onodeID, ephemeral=True)
|
||||||
|
if ("publicKey" not in keysOb) or ("staticKey" not in keysOb):
|
||||||
|
dkeysOb = self.getRecord("key", onodeID)
|
||||||
|
if ("publicKey" not in keysOb):
|
||||||
|
self.stores["key"].update(
|
||||||
|
onodeID, {
|
||||||
|
"publicKey": ECC.import_key(
|
||||||
|
dkeysOb["publicKey"]
|
||||||
)
|
)
|
||||||
privateKey = privateKey.private_bytes(
|
}, write=False
|
||||||
Encoding.PEM, PrivateFormat.PKCS8, NoEncryption()
|
|
||||||
)
|
)
|
||||||
return [privateKey, publicKey]
|
if ("staticKey" not in keysOb):
|
||||||
|
self.stores["key"].update(
|
||||||
def keyDerive(self, pubKey: bytes, salt: bytes, nodeID: str, params: bytes):
|
onodeID, {
|
||||||
"""
|
"staticKey": ECC.import_key(
|
||||||
Derive shared key using Diffie Hellman
|
dkeysOb["staticKey"]
|
||||||
|
|
||||||
pubKey: bytes
|
|
||||||
Public key
|
|
||||||
|
|
||||||
nodeID: str
|
|
||||||
PierMesh node ID
|
|
||||||
|
|
||||||
params: bytes
|
|
||||||
Encryption parameters
|
|
||||||
"""
|
|
||||||
if self.checkInMem("param", nodeID) == False:
|
|
||||||
if self.getRecord("param", nodeID) == False:
|
|
||||||
self.updateStore("param", nodeID, params, recur=False)
|
|
||||||
self.loadRecordToMem("param", nodeID)
|
|
||||||
self.cLog(20, "Precheck done for key derivation")
|
|
||||||
|
|
||||||
# TODO: Load them and if private key exists load it, otherwise generate a private key
|
|
||||||
if self.checkInMem("key", nodeID) == False:
|
|
||||||
if self.getRecord("key", nodeID) == False:
|
|
||||||
privateKey, publicKey = self.genKeyPair(setSelf=False)
|
|
||||||
self.updateStore(
|
|
||||||
"key", nodeID, {"publicKey": publicKey, "privateKey": privateKey}
|
|
||||||
)
|
)
|
||||||
self.loadRecordToMem("key", nodeID)
|
}, write=False
|
||||||
|
|
||||||
sharedKey = self.loadedKeys[nodeID]["privateKey"].exchange(
|
|
||||||
Serialization.load_pem_public_key(pubKey)
|
|
||||||
)
|
)
|
||||||
# Perform key derivation.
|
keysOb = self.getRecord("key", onodeID, ephemeral=True)
|
||||||
self.cLog(20, "Performing key derivation")
|
reget = False
|
||||||
derivedKey = HKDF(
|
if "staticKey" not in keysOb:
|
||||||
algorithm=hashes.SHA256(), length=32, salt=salt, info=b"handshake data"
|
self.genStaticKey(onodeID)
|
||||||
).derive(sharedKey)
|
reget = True
|
||||||
self.cLog(20, "Derived key")
|
if "ourEphemeralKey" not in keysOb:
|
||||||
ederivedKey = base64.urlsafe_b64encode(derivedKey)
|
self.genOurEphemeralKey(onodeID)
|
||||||
tr = self.getRecord("key", nodeID)
|
reget = True
|
||||||
tr["derivedKey"] = ederivedKey
|
if reget:
|
||||||
self.updateStore("key", nodeID, tr)
|
keysOb = self.getRecord("key", onodeID, ephemeral=True)
|
||||||
self.cLog(20, "Done with cryptography store updates")
|
sessionKey = key_agreement(
|
||||||
return ederivedKey
|
static_priv=keysOb["staticKey"],
|
||||||
|
static_pub=keysOb["publicKey"],
|
||||||
|
eph_priv=keysOb["ourEphemeralKey"],
|
||||||
|
eph_pub=keysOb["peerEphemeralKey"],
|
||||||
|
kdf=self.kdf,
|
||||||
|
)
|
||||||
|
self.stores["key"].update(onodeID, {"sessionKey": sessionKey}, write=False)
|
||||||
|
return sessionKey
|
||||||
|
|
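The session key above is derived by combining each side's static and ephemeral ECC keys through PyCryptodome's `key_agreement`. Below is a minimal sketch of that flow, assuming PyCryptodome >= 3.18; the HKDF-based `kdf` is an assumption, since `Transport.kdf` itself is not shown in this hunk.

```python
# Minimal sketch of the static + ephemeral ECDH flow used above.
# Assumes PyCryptodome >= 3.18; the HKDF-based kdf is an assumption,
# the real Transport.kdf may differ.
from Crypto.PublicKey import ECC
from Crypto.Protocol.DH import key_agreement
from Crypto.Protocol.KDF import HKDF
from Crypto.Hash import SHA256


def kdf(shared_secret: bytes) -> bytes:
    # Stretch the raw ECDH secret into a 32-byte session key
    return HKDF(shared_secret, 32, b"", SHA256)


# Each side holds a long-term (static) key and a per-session (ephemeral) key
a_static, a_eph = ECC.generate(curve="p256"), ECC.generate(curve="p256")
b_static, b_eph = ECC.generate(curve="p256"), ECC.generate(curve="p256")

# Node A combines its private keys with B's public keys ...
key_a = key_agreement(
    static_priv=a_static, static_pub=b_static.public_key(),
    eph_priv=a_eph, eph_pub=b_eph.public_key(), kdf=kdf,
)
# ... and node B does the mirror image, arriving at the same session key
key_b = key_agreement(
    static_priv=b_static, static_pub=a_static.public_key(),
    eph_priv=b_eph, eph_pub=a_eph.public_key(), kdf=kdf,
)
assert key_a == key_b
```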
||||||
def getSalt(self):
|
def encrypt(self, data, nodeID: str, isDict: bool = True, pskEncrypt=False):
|
||||||
"""
|
"""
|
||||||
Get random salt
|
Encrypt given data with AES GCM
|
||||||
"""
|
|
||||||
return os.urandom(16)
|
|
||||||
|
|
||||||
# TODO: Build in transport security (node/node)
|
|
||||||
def encrypt(self, data, nodeID: str, isDict: bool = True):
|
|
||||||
"""
|
|
||||||
Do Fernet encryption
|
|
||||||
|
|
||||||
data
|
data
|
||||||
Either bytes or dict to encrypt
|
Either bytes or dict to encrypt
|
||||||
|
|
||||||
isDict: bool
|
isDict: bool
|
||||||
Whether data is a dictionary
|
Whether data is a dictionary
|
||||||
|
|
||||||
|
pskEncrypt: bool
|
||||||
|
Whether to encrypt with pre-shared key
|
||||||
"""
|
"""
|
||||||
r = self.getRecord("key", nodeID)
|
if nodeID == "-00001" or pskEncrypt:
|
||||||
if r == False:
|
cipher = AES.new(self.getRecord("key", "self", ephemeral=True)["PSK"], AES.MODE_GCM)
|
||||||
self.cLog(20, "Node {0} not in keystore".format(nodeID))
|
nonce = cipher.nonce
|
||||||
return False
|
|
||||||
else:
|
|
||||||
derivedKey = r["derivedKey"]
|
|
||||||
fernet = Fernet(derivedKey)
|
|
||||||
if isDict:
|
if isDict:
|
||||||
data = msgpack.dumps(data)
|
data = msgpack.dumps(data)
|
||||||
token = fernet.encrypt(data)
|
ciphertext, tag = cipher.encrypt_and_digest(data)
|
||||||
return token
|
return (ciphertext, nonce, tag)
|
||||||
|
elif (self.getRecord("key", nodeID)) == False:
|
||||||
def decrypt(self, data, nodeID: str):
|
logger.log(30, "Node {0} not in keychain".format(nodeID))
|
||||||
"""
|
|
||||||
Decrypt bytes and return either str or dict (TODO: Check whether to msgpack load)
|
|
||||||
"""
|
|
||||||
r = self.getRecord("key", nodeID)
|
|
||||||
if r == False:
|
|
||||||
self.cLog(20, "No record of node " + nodeID)
|
|
||||||
return False
|
|
||||||
elif not "derivedKey" in r.keys():
|
|
||||||
self.cLog(20, "No key derived for node " + nodeID)
|
|
||||||
return False
|
return False
|
||||||
else:
|
else:
|
||||||
fernet = Fernet(self.getRecord("key", nodeID)["derivedKey"])
|
r = self.getRecord("key", nodeID, ephemeral=True)
|
||||||
return msgpack.loads(fernet.decrypt(data))
|
if r == False:
|
||||||
|
r = self.getRecord("key", "self", ephemeral=True)
|
||||||
|
logger.info(r)
|
||||||
|
if "sessionKey" in r.keys():
|
||||||
|
sessionKey = r["sessionKey"]
|
||||||
|
cipher = AES.new(sessionKey, AES.MODE_GCM)
|
||||||
|
nonce = cipher.nonce
|
||||||
|
if isDict:
|
||||||
|
data = msgpack.dumps(data)
|
||||||
|
ciphertext, tag = cipher.encrypt_and_digest(data)
|
||||||
|
return (ciphertext, nonce, tag)
|
||||||
|
elif "PSK" in r.keys():
|
||||||
|
cipher = AES.new(r["PSK"], AES.MODE_GCM)
|
||||||
|
nonce = cipher.nonce
|
||||||
|
if isDict:
|
||||||
|
data = msgpack.dumps(data)
|
||||||
|
ciphertext, tag = cipher.encrypt_and_digest(data)
|
||||||
|
return (ciphertext, nonce, tag)
|
||||||
|
else:
|
||||||
|
logger.log(20, "Node {0} does not have session key".format(nodeID))
|
||||||
|
|
||||||
|
def decrypt(self, data, onodeID: str, nonce, tag):
|
||||||
|
"""
|
||||||
|
Decrypt bytes and return either str or dict depending on result
|
||||||
|
|
||||||
|
onodeID: str
|
||||||
|
Node identifier
|
||||||
|
|
||||||
|
nonce
|
||||||
|
Encryption nonce
|
||||||
|
|
||||||
|
tag
|
||||||
|
Encryption tag
|
||||||
|
"""
|
||||||
|
record = self.getRecord("key", onodeID, ephemeral=True)
|
||||||
|
if (record == False) or ("sessionKey" not in record.keys()):
|
||||||
|
cipher = AES.new(self.getRecord("key", "self", ephemeral=True)["PSK"], AES.MODE_GCM, nonce=nonce)
|
||||||
|
|
||||||
|
data = cipher.decrypt(data)
|
||||||
|
# logger.debug(data)
|
||||||
|
try:
|
||||||
|
data = msgpack.loads(lzma.decompress(data))
|
||||||
|
except Exception:
|
||||||
|
logger.error(traceback.format_exc())
|
||||||
|
return False
|
||||||
|
# logger.debug("Decrypt/deserialize output")
|
||||||
|
# logger.debug(data)
|
||||||
|
return data
|
||||||
|
else:
|
||||||
|
if "sessionKey" in record.keys():
|
||||||
|
sessionKey = record["sessionKey"]
|
||||||
|
cipher = AES.new(sessionKey, AES.MODE_GCM, nonce=nonce)
|
||||||
|
|
||||||
|
data = cipher.decrypt(data)
|
||||||
|
data = msgpack.loads(lzma.decompress(data))
|
||||||
|
|
||||||
|
return data
|
||||||
|
elif "PSK" in record.keys():
|
||||||
|
cipher = AES.new(record["PSK"], AES.MODE_GCM, nonce=nonce)
|
||||||
|
|
||||||
|
data = cipher.decrypt(data)
|
||||||
|
data = msgpack.loads(lzma.decompress(data))
|
||||||
|
|
||||||
|
return data
|
||||||
|
else:
|
||||||
|
logger.warning("Node {0} does not have session key".format(onodeID))
|
||||||
|
return False
|
||||||
|
|
|
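The encrypt/decrypt pair above wraps msgpack-serialized, lzma-compressed payloads in AES-GCM and ships the nonce and tag alongside the ciphertext. A minimal round-trip sketch under those assumptions; the random 32-byte key stands in for a derived session key or the PSK, and the sketch also verifies the tag on decrypt.

```python
# Minimal AES-GCM round trip mirroring the encrypt/decrypt flow above.
# The 32-byte key is illustrative only.
import lzma
import msgpack
from Crypto.Cipher import AES
from Crypto.Random import get_random_bytes

sessionKey = get_random_bytes(32)
payload = {"hello": "piermesh"}

# Sender side: serialize, compress, then encrypt
plaintext = lzma.compress(msgpack.dumps(payload))
cipher = AES.new(sessionKey, AES.MODE_GCM)
ciphertext, tag = cipher.encrypt_and_digest(plaintext)
nonce = cipher.nonce  # must travel with the ciphertext

# Receiver side: decrypt with the same key and nonce, then decompress and deserialize
cipher = AES.new(sessionKey, AES.MODE_GCM, nonce=nonce)
data = msgpack.loads(lzma.decompress(cipher.decrypt_and_verify(ciphertext, tag)))
assert data == payload
```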
@ -1,7 +1,7 @@
|
||||||
|
|
||||||
WhaleSong: Diffie hellman ephemeral Fernet based encryption
|
WhaleSong
|
||||||
===========================================================
|
=========
|
||||||
|
|
||||||
.. autoclass:: Cryptography.WhaleSong.DHEFern
|
.. autoclass:: Cryptography.WhaleSong.Transport
|
||||||
:members:
|
:members:
|
||||||
:undoc-members:
|
:undoc-members:
|
||||||
|
|
|
@ -1,68 +1,74 @@
|
||||||
from Daisy.Daisy import Daisy
|
from Daisy.Daisy import Daisy
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
import logging
|
||||||
|
|
||||||
import msgpack
|
import msgpack
|
||||||
|
|
||||||
from watchdog.observers import Observer
|
|
||||||
|
|
||||||
# TODO: Dumping to cacheFile
|
# TODO: Dumping to cacheFile
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger("__main__." + __name__)
|
||||||
|
|
||||||
class Cache:
|
class Cache:
|
||||||
"""
|
"""
|
||||||
In memory collection of Daisy records
|
In memory collection of Daisy records; provides search functionality currently utilized by `Daisy.Catch.Catch`
|
||||||
|
|
||||||
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Daisy/Cache.py>`__
|
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Daisy/Cache.py>`__
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
|
daisyCryptography,
|
||||||
filepaths=None,
|
filepaths=None,
|
||||||
cacheFile=None,
|
cacheFile=None,
|
||||||
path: str = "daisy",
|
path: str = "daisy",
|
||||||
walk: bool = False,
|
walk: bool = False
|
||||||
isCatch: bool = False,
|
|
||||||
):
|
):
|
||||||
"""
|
"""
|
||||||
Parameters
|
Parameters
|
||||||
----------
|
----------
|
||||||
|
daisyCryptography: Daisy.CryptographyUtil.SteelPetal
|
||||||
|
Record cryptography reference
|
||||||
|
|
||||||
filepaths
|
filepaths
|
||||||
Either a list of filepaths to load or None
|
Either a list of filepaths to load or None
|
||||||
|
|
||||||
cacheFile
|
cacheFile
|
||||||
Path to a cache file which is a collection of paths to load
|
Path to a cache file which is a collection of paths to load or None
|
||||||
|
|
||||||
path: str
|
path: str
|
||||||
Path prefix to load records from
|
Path prefix to load records from
|
||||||
|
|
||||||
walk: bool
|
walk: bool
|
||||||
Whether to automatically walk the path and load records
|
Whether to automatically walk the path and load records
|
||||||
|
|
||||||
isCatch: bool
|
|
||||||
Whether this cache is for catchs
|
|
||||||
"""
|
"""
|
||||||
|
self.daisyCryptography = daisyCryptography
|
||||||
self.data = {}
|
self.data = {}
|
||||||
self.path = path
|
self.path = path
|
||||||
|
|
||||||
if filepaths != None:
|
if not os.path.exists(self.path):
|
||||||
|
os.makedirs(self.path)
|
||||||
|
|
||||||
|
if filepaths is not None:
|
||||||
for fp in filepaths:
|
for fp in filepaths:
|
||||||
fp = path + "/" + fp
|
fp = path + "/" + fp
|
||||||
if os.path.isfile(fp):
|
if os.path.isfile(fp):
|
||||||
self.data[fp] = Daisy(fp)
|
self.data[fp] = Daisy(fp, daisyCryptography)
|
||||||
elif cacheFile != None:
|
elif cacheFile is not None:
|
||||||
|
self.cacheFile = cacheFile
|
||||||
with open(cacheFile, "r") as f:
|
with open(cacheFile, "r") as f:
|
||||||
for fp in f.read().split("\n"):
|
for fp in f.read().split("\n"):
|
||||||
self.data[fp] = Daisy(fp)
|
self.data[fp] = Daisy(fp, daisyCryptography)
|
||||||
elif walk:
|
elif walk:
|
||||||
for root, dirs, files in os.walk(self.path):
|
for root, dirs, files in os.walk(self.path):
|
||||||
for p in dirs + files:
|
for p in dirs + files:
|
||||||
if not (".json" in p):
|
if not (".json" in p):
|
||||||
if not (".md" in p):
|
if not (".md" in p):
|
||||||
tpath = root + "/" + p
|
tpath = root + "/" + p
|
||||||
self.data[tpath] = Daisy(tpath)
|
self.data[tpath] = Daisy(tpath, daisyCryptography)
|
||||||
|
|
||||||
def create(self, path: str, data: dict):
|
def create(self, path: str, data: dict, remote=False):
|
||||||
"""
|
"""
|
||||||
Create new record
|
Create new record
|
||||||
|
|
||||||
|
@ -73,13 +79,23 @@ class Cache:
|
||||||
|
|
||||||
data: dict
|
data: dict
|
||||||
Data to populate record with
|
Data to populate record with
|
||||||
|
|
||||||
|
remote: bool
|
||||||
|
Whether this is a reference to a distributed file (not implemented yet)
|
||||||
"""
|
"""
|
||||||
|
if not remote:
|
||||||
with open(self.path + "/" + path, "wb") as f:
|
with open(self.path + "/" + path, "wb") as f:
|
||||||
f.write(msgpack.dumps(data))
|
f.write(msgpack.dumps(data))
|
||||||
# logging.log(10, "Done creating record")
|
logger.debug("Done creating record")
|
||||||
self.data[path] = Daisy(self.path + "/" + path)
|
self.data[path] = Daisy(self.path + "/" + path, self.daisyCryptography)
|
||||||
# logging.log(10, "Done loading to Daisy")
|
logger.debug("Done loading to Daisy")
|
||||||
return self.data[path]
|
return self.data[path]
|
||||||
|
else:
|
||||||
|
logger.debug("Not that (you shouldn't be here yet, remote Daisy links aren't ready yet)")
|
||||||
|
# TODO: Full remote path functionality
|
||||||
|
pass
|
||||||
|
# self.data[path] = Ref(path, remote)
|
||||||
|
# return self.data[path]
|
||||||
|
|
||||||
def get(self, path: str):
|
def get(self, path: str):
|
||||||
"""
|
"""
|
||||||
|
@ -92,10 +108,11 @@ class Cache:
|
||||||
return self.data[path]
|
return self.data[path]
|
||||||
else:
|
else:
|
||||||
if os.path.exists(self.path + "/" + path):
|
if os.path.exists(self.path + "/" + path):
|
||||||
self.data[path] = Daisy(self.path + "/" + path)
|
self.data[path] = Daisy(self.path + "/" + path, self.daisyCryptography)
|
||||||
return self.data[path]
|
return self.data[path]
|
||||||
else:
|
else:
|
||||||
# logging.log(10, "File does not exist")
|
path = self.path + "/" + path
|
||||||
|
logger.debug(f"File {path} does not exist")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def refresh(self):
|
def refresh(self):
|
||||||
|
@ -107,18 +124,18 @@ class Cache:
|
||||||
|
|
||||||
def search(self, keydict: dict, strict: bool = True):
|
def search(self, keydict: dict, strict: bool = True):
|
||||||
"""
|
"""
|
||||||
Search cache for record for records with values
|
Search the cache for records with keys and values matching those
|
||||||
|
in the keydict
|
||||||
|
|
||||||
keydict: dict
|
keydict: dict
|
||||||
Values to search for
|
|
||||||
|
|
||||||
strict: bool
|
strict: bool
|
||||||
Whether to require values match
|
Whether to require all keys/values match
|
||||||
"""
|
"""
|
||||||
results = []
|
results = []
|
||||||
for key, val in self.data.items():
|
for key, val in self.data.items():
|
||||||
val = val.get()
|
val = val.get()
|
||||||
if strict and type(val) != str:
|
if strict and type(val) is not str:
|
||||||
addcheck = False
|
addcheck = False
|
||||||
for k, v in keydict.items():
|
for k, v in keydict.items():
|
||||||
if k in val.keys():
|
if k in val.keys():
|
||||||
|
@ -129,7 +146,7 @@ class Cache:
|
||||||
break
|
break
|
||||||
if addcheck:
|
if addcheck:
|
||||||
results.append([key, val])
|
results.append([key, val])
|
||||||
elif type(val) != str:
|
elif type(val) is not str:
|
||||||
for k, v in keydict.items():
|
for k, v in keydict.items():
|
||||||
if k in val.keys():
|
if k in val.keys():
|
||||||
if v in val[k]:
|
if v in val[k]:
|
||||||
|
|
|
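A hypothetical usage sketch of the Cache API above, assuming the modules import as laid out in this diff; the passphrase, paths, and record contents are illustrative.

```python
# Hypothetical usage of the Cache shown above; key and paths are illustrative.
from Daisy.Cache import Cache
from Daisy.CryptographyUtil import SteelPetal

recordCrypto = SteelPetal("example passphrase")       # record cryptography reference
cache = Cache(recordCrypto, path="daisy", walk=True)  # loads any records already under daisy/

cache.create("settings", {"theme": "dark", "nodeNickname": "node00"})
record = cache.get("settings")                        # Daisy record, or False if missing
matches = cache.search({"theme": "dark"}, strict=True)  # -> [[path, record], ...] pairs
```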
@ -1,5 +1,8 @@
|
||||||
|
# NOTE: Local imports
|
||||||
from Daisy.Cache import Cache
|
from Daisy.Cache import Cache
|
||||||
|
# from Daisy.Ref import Ref
|
||||||
|
|
||||||
|
# NOTE: Generic imports
|
||||||
import os
|
import os
|
||||||
import random
|
import random
|
||||||
|
|
||||||
|
@ -16,13 +19,37 @@ class Catch(Cache):
|
||||||
catches = {}
|
catches = {}
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self, path: str = "catch", filepaths=None, catchFile=None, walk: bool = False
|
self,
|
||||||
|
daisyCryptography,
|
||||||
|
path: str = "catch",
|
||||||
|
filepaths=None,
|
||||||
|
catchFile=None,
|
||||||
|
walk: bool = False,
|
||||||
):
|
):
|
||||||
"""
|
"""
|
||||||
Basically the same initialization parameters as Catch
|
Parameters
|
||||||
|
----------
|
||||||
|
path: str
|
||||||
|
Path prefix to load records from
|
||||||
|
|
||||||
|
daisyCryptography: Daisy.CryptographyUtil.SteelPetal
|
||||||
|
Record cryptography reference
|
||||||
|
|
||||||
|
filepaths
|
||||||
|
Either a list of filepaths to load or None
|
||||||
|
|
||||||
|
catchFile
|
||||||
|
Path to a cache file which is a collection of paths to load or None
|
||||||
|
|
||||||
|
walk: bool
|
||||||
|
Whether to automatically walk the path and load records
|
||||||
"""
|
"""
|
||||||
super().__init__(
|
super().__init__(
|
||||||
filepaths=filepaths, cacheFile=catchFile, path=path, walk=walk, isCatch=True
|
daisyCryptography,
|
||||||
|
filepaths=filepaths,
|
||||||
|
cacheFile=catchFile,
|
||||||
|
path=path,
|
||||||
|
walk=walk,
|
||||||
)
|
)
|
||||||
|
|
||||||
# TODO: Fins
|
# TODO: Fins
|
||||||
|
@ -34,7 +61,7 @@ class Catch(Cache):
|
||||||
return super().get(path)
|
return super().get(path)
|
||||||
|
|
||||||
# TODO: Rename
|
# TODO: Rename
|
||||||
def get(self, head: str, tail: str, fins=None):
|
def get(self, head: str, body: str, fins=None):
|
||||||
"""
|
"""
|
||||||
Get catch by pieces
|
Get catch by pieces
|
||||||
|
|
||||||
|
@ -49,21 +76,41 @@ class Catch(Cache):
|
||||||
fins
|
fins
|
||||||
List of (maximum 8 characters) strings at the end of the catch or None if none
|
List of (maximum 8 characters) strings at the end of the catch or None if none
|
||||||
"""
|
"""
|
||||||
r = self.search({"head": head, "tail": tail})
|
r = ""
|
||||||
|
if fins is not None and fins != "":
|
||||||
|
r = self.search({"head": head, "body": body, "fins": fins})
|
||||||
|
else:
|
||||||
|
r = self.search({"head": head, "body": body})
|
||||||
|
if len(r) < 1:
|
||||||
|
return False
|
||||||
|
else:
|
||||||
return r[0][1]["html"]
|
return r[0][1]["html"]
|
||||||
|
|
||||||
def addc(self, peer, node, seperator, head, tail, data, fins=None):
|
def addc(self, peer, node, seperator, head, body, data, fins=None, remote=False):
|
||||||
tnpath = "catch/" + node
|
tnpath = f"catch/{node}"
|
||||||
if os.path.exists(tnpath) != True:
|
if not os.path.exists(self.path + "/" + tnpath):
|
||||||
os.makedirs(tnpath)
|
os.makedirs(self.path + "/" + tnpath)
|
||||||
tppath = tnpath + "/" + peer
|
tppath = tnpath + "/" + peer
|
||||||
if os.path.exists(tppath) != True:
|
if not os.path.exists(self.path + "/" + tppath):
|
||||||
os.makedirs(tppath)
|
os.makedirs(self.path + "/" + tppath)
|
||||||
sid = str(random.randrange(0, 999999)).zfill(6)
|
sid = str(random.randrange(0, 999999)).zfill(6)
|
||||||
data["seperator"] = seperator
|
data["seperator"] = seperator
|
||||||
data["head"] = head
|
data["head"] = head
|
||||||
data["tail"] = tail
|
data["body"] = body
|
||||||
if fins != None:
|
if fins is not None:
|
||||||
data["fins"] = fins
|
data["fins"] = fins
|
||||||
res = self.create("{0}/{1}/{2}".format(node, peer, sid), data)
|
res = self.create("{0}/{1}".format(tppath, sid), data, remote=remote)
|
||||||
return [sid, res]
|
return [sid, res]
|
||||||
|
|
||||||
|
def genIndex(self, onodeID):
|
||||||
|
dirList = []
|
||||||
|
for k, v in self.data.items():
|
||||||
|
curCatch = {"remoteNode": onodeID}
|
||||||
|
if type(v.msg) is not str:
|
||||||
|
curCatch = curCatch | v.msg
|
||||||
|
del curCatch["html"]
|
||||||
|
dirList.append(curCatch)
|
||||||
|
return dirList
|
||||||
|
|
||||||
|
def mergeIndex(self, remoteIndex):
|
||||||
|
self.remoteCatchesMap += remoteIndex
|
||||||
|
|
|
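A hypothetical sketch of storing and resolving a Catch with the addc/get methods above; all IDs, the separator, and the HTML are illustrative.

```python
# Hypothetical Catch usage based on the addc/get methods above.
from Daisy.Catch import Catch
from Daisy.CryptographyUtil import SteelPetal

catch = Catch(SteelPetal("example passphrase"), path="catch")

# Store a small site under the address head="fish", body="swimfast"
sid, record = catch.addc(
    "000001",            # peer ID
    "000100",            # node ID
    ".",                 # seperator (spelling follows the source)
    "fish", "swimfast",
    {"html": "<h1>hello</h1>"},
)

# Later, resolve the address back to its HTML
html = catch.get("fish", "swimfast")
```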
@ -0,0 +1,13 @@
|
||||||
|
from Daisy.Daisy import Daisy
|
||||||
|
|
||||||
|
|
||||||
|
class Credential(Daisy):
|
||||||
|
"""
|
||||||
|
Currently unused credential class; it will be fleshed out for credentialed access to the web UI
|
||||||
|
"""
|
||||||
|
def __init__(self, nodeNickname, credentialName, extension, daisyCryptography):
|
||||||
|
fname = "data/{0}/{1}.{2}".format(nodeNickname, credentialName, extension)
|
||||||
|
super().__init__(
|
||||||
|
fname,
|
||||||
|
daisyCryptography,
|
||||||
|
)
|
|
@ -0,0 +1,6 @@
|
||||||
|
Credential
|
||||||
|
==========
|
||||||
|
|
||||||
|
.. autoclass:: Daisy.Credential.Credential
|
||||||
|
:members:
|
||||||
|
:undoc-members:
|
|
@ -0,0 +1,80 @@
|
||||||
|
# NOTE: Cryptography import
|
||||||
|
from Crypto.Cipher import AES
|
||||||
|
|
||||||
|
# NOTE: Generic imports
|
||||||
|
import traceback
|
||||||
|
import logging
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger("__main__." + __name__)
|
||||||
|
|
||||||
|
|
||||||
|
class SteelPetal:
|
||||||
|
"""
|
||||||
|
Cryptography utility for encrypting files
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, key: str, nonce=None, testData=None):
|
||||||
|
"""
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
key: str
|
||||||
|
User's plaintext key
|
||||||
|
|
||||||
|
nonce
|
||||||
|
Cryptographic artifact we can use to reinitialize cryptographic operations
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
if nonce is None:
|
||||||
|
self.cipher = AES.new(self.pad(key).encode("utf-8"), AES.MODE_GCM)
|
||||||
|
self.nonce = self.cipher.nonce
|
||||||
|
else:
|
||||||
|
self.cipher = AES.new(
|
||||||
|
self.pad(key).encode("utf-8"), AES.MODE_GCM, nonce=nonce
|
||||||
|
)
|
||||||
|
self.nonce = nonce
|
||||||
|
if testData is not None:
|
||||||
|
try:
|
||||||
|
self.cipher.decrypt(testData)
|
||||||
|
except Exception:
|
||||||
|
logger.error(traceback.format_exc())
|
||||||
|
return False
|
||||||
|
except Exception:
|
||||||
|
logger.error(traceback.format_exc())
|
||||||
|
|
||||||
|
def pad(self, key: str):
|
||||||
|
"""
|
||||||
|
Pad the key to a multiple of the AES block size
|
||||||
|
|
||||||
|
key: str
|
||||||
|
User's plain text key
|
||||||
|
"""
|
||||||
|
BS = AES.block_size
|
||||||
|
key = key + (BS - len(key) % BS) * chr(BS - len(key) % BS)
|
||||||
|
return key
|
||||||
|
|
||||||
|
def encrypt(self, data: bytes):
|
||||||
|
"""
|
||||||
|
Encrypt binary data
|
||||||
|
|
||||||
|
data: bytes
|
||||||
|
Data to encrypt
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
return self.cipher.encrypt_and_digest(data)
|
||||||
|
except Exception:
|
||||||
|
logger.error(traceback.format_exc())
|
||||||
|
return False
|
||||||
|
|
||||||
|
def decrypt(self, data: bytes):
|
||||||
|
"""
|
||||||
|
Decrypt encrypted binary data
|
||||||
|
|
||||||
|
data: bytes
|
||||||
|
Data to decrypt
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
return self.cipher.decrypt(data)
|
||||||
|
except Exception:
|
||||||
|
logger.error(traceback.format_exc())
|
||||||
|
return False
|
|
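A hypothetical round trip through SteelPetal as defined above; the passphrase and plaintext are illustrative. A short key is padded up to the AES block size, the data is encrypted with AES-GCM, and a second instance reinitialized with the stored nonce can decrypt it.

```python
# Hypothetical SteelPetal usage; passphrase and data are illustrative.
from Daisy.CryptographyUtil import SteelPetal

petal = SteelPetal("hunter2")                     # short key gets padded to the AES block size
ciphertext, tag = petal.encrypt(b"record bytes")  # AES-GCM ciphertext plus authentication tag

# A reader reinitializes with the same passphrase and the stored nonce
reader = SteelPetal("hunter2", nonce=petal.nonce)
assert reader.decrypt(ciphertext) == b"record bytes"
```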
@ -0,0 +1,6 @@
|
||||||
|
CryptographyUtil
|
||||||
|
================
|
||||||
|
|
||||||
|
.. autoclass:: Daisy.CryptographyUtil.SteelPetal
|
||||||
|
:members:
|
||||||
|
:undoc-members:
|
|
@ -1,12 +1,11 @@
|
||||||
import os
|
import os
|
||||||
import json
|
import json
|
||||||
import msgpack
|
import msgpack
|
||||||
|
import logging
|
||||||
|
|
||||||
# TODO: delete
|
# TODO: delete
|
||||||
# TODO: propagate json changes to msgpack automatically
|
|
||||||
# TODO: propagate msgpack changes to cache automatically
|
|
||||||
# TODO: Indexing
|
|
||||||
|
|
||||||
|
logger = logging.getLogger("__main__." + __name__)
|
||||||
|
|
||||||
def _json_to_msg(path: str):
|
def _json_to_msg(path: str):
|
||||||
"""
|
"""
|
||||||
|
@ -43,9 +42,11 @@ class Daisy:
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
filepath: str,
|
filepath: str,
|
||||||
|
daisyCryptography,
|
||||||
templates: dict = {},
|
templates: dict = {},
|
||||||
template: bool = False,
|
template: bool = False,
|
||||||
prefillDict: bool = False,
|
prefillDict: bool = False,
|
||||||
|
remote=False,
|
||||||
):
|
):
|
||||||
"""
|
"""
|
||||||
Parameters
|
Parameters
|
||||||
|
@ -59,33 +60,41 @@ class Daisy:
|
||||||
template: bool
|
template: bool
|
||||||
Which template to use
|
Which template to use
|
||||||
|
|
||||||
prefillDict: bool
|
prefillDict: dict
|
||||||
Whether to fill the record with a template
|
Data to prefill record with
|
||||||
"""
|
"""
|
||||||
|
# TODO: Finish remote implementation
|
||||||
|
self.remote = False
|
||||||
self.filepath = filepath
|
self.filepath = filepath
|
||||||
if os.path.exists(filepath) != True:
|
if remote:
|
||||||
|
self.remote = True
|
||||||
|
self.remoteNodeID = remote
|
||||||
|
else:
|
||||||
|
if not os.path.exists(filepath):
|
||||||
with open(filepath, "wb") as f:
|
with open(filepath, "wb") as f:
|
||||||
if template != False:
|
if template:
|
||||||
if template in templates.keys():
|
if template in templates.keys():
|
||||||
t = templates[template].get()
|
t = templates[template].get()
|
||||||
if prefillDict != False:
|
if prefillDict:
|
||||||
for k in prefillDict.keys():
|
for k in prefillDict.keys():
|
||||||
t[k] = prefillDict[k]
|
t[k] = prefillDict[k]
|
||||||
f.write(msgpack.dumps(t))
|
f.write(msgpack.dumps(t))
|
||||||
self.msg = t
|
self.msg = t
|
||||||
else:
|
else:
|
||||||
print("No such template as: " + template)
|
logger.error("No such template as: " + template)
|
||||||
else:
|
else:
|
||||||
f.write(msgpack.dumps({}))
|
t = {}
|
||||||
self.msg = {}
|
if prefillDict:
|
||||||
|
for k in prefillDict.keys():
|
||||||
|
t[k] = prefillDict[k]
|
||||||
|
f.write(msgpack.dumps(t))
|
||||||
|
self.msg = t
|
||||||
elif os.path.isdir(filepath):
|
elif os.path.isdir(filepath):
|
||||||
self.msg = "directory"
|
self.msg = "directory"
|
||||||
else:
|
else:
|
||||||
with open(filepath, "rb") as f:
|
with open(filepath, "rb") as f:
|
||||||
self.msg = msgpack.loads(f.read())
|
self.msg = msgpack.loads(f.read())
|
||||||
|
|
||||||
# Use override for updating
|
|
||||||
|
|
||||||
def write(
|
def write(
|
||||||
self,
|
self,
|
||||||
override=False,
|
override=False,
|
||||||
|
@ -94,7 +103,7 @@ class Daisy:
|
||||||
recur: bool = False,
|
recur: bool = False,
|
||||||
):
|
):
|
||||||
"""
|
"""
|
||||||
Write record to disk
|
Write record to disk, note: use override with updated record to update record
|
||||||
|
|
||||||
Parameters
|
Parameters
|
||||||
----------
|
----------
|
||||||
|
@ -110,11 +119,11 @@ class Daisy:
|
||||||
recur: bool
|
recur: bool
|
||||||
Whether to recursively handle keys
|
Whether to recursively handle keys
|
||||||
"""
|
"""
|
||||||
if override != False:
|
if override:
|
||||||
for key in override.keys():
|
for key in override.keys():
|
||||||
# TODO: Deeper recursion
|
# TODO: Deeper recursion
|
||||||
if recur:
|
if recur:
|
||||||
if not key in self.msg.keys():
|
if key not in self.msg.keys():
|
||||||
self.msg[key] = {}
|
self.msg[key] = {}
|
||||||
for ikey in override[key].keys():
|
for ikey in override[key].keys():
|
||||||
self.msg[key][ikey] = override[key][ikey]
|
self.msg[key][ikey] = override[key][ikey]
|
||||||
|
@ -164,26 +173,18 @@ class Daisy:
|
||||||
else:
|
else:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
def json_to_msg(self, path: str):
|
||||||
def loadTemplates(templatePath: str = "templates"):
|
"""
|
||||||
"""Load templates for prefilling records
|
Convert json at the path plus .json to a msgpack binary
|
||||||
|
|
||||||
Parameters
|
Parameters
|
||||||
----------
|
----------
|
||||||
templatePath: str
|
path: str
|
||||||
Path to templates
|
Path to json minus the extension
|
||||||
"""
|
"""
|
||||||
templates = {}
|
rpath = path + ".json"
|
||||||
for p in os.listdir(templatePath):
|
res = b""
|
||||||
p = templatePath + "/" + p
|
with open(rpath) as f:
|
||||||
if os.path.isdir(p):
|
res = msgpack.dumps(json.load(f))
|
||||||
for ip in os.listdir(p):
|
with open(path, "wb") as f:
|
||||||
ip = p + "/" + ip
|
f.write(res)
|
||||||
if os.path.isdir(ip):
|
|
||||||
print("Too deep, skipping: " + ip)
|
|
||||||
else:
|
|
||||||
templates[ip] = Daisy(ip)
|
|
||||||
else:
|
|
||||||
templates[p] = Daisy(p)
|
|
||||||
self.templates = templates
|
|
||||||
return templates
|
|
||||||
|
|
|
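A standalone sketch of the JSON-to-msgpack conversion the method above performs; the file path is illustrative.

```python
# Standalone sketch of the json -> msgpack conversion shown above.
import json
import msgpack


def json_to_msg(path: str) -> None:
    # Read <path>.json and write the packed bytes to <path>
    with open(path + ".json") as f:
        packed = msgpack.dumps(json.load(f))
    with open(path, "wb") as f:
        f.write(packed)

# The resulting record can be read back with msgpack.loads:
# json_to_msg("daisy/settings")
# with open("daisy/settings", "rb") as f:
#     record = msgpack.loads(f.read())
```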
@ -0,0 +1,86 @@
|
||||||
|
from Daisy.Daisy import Daisy
|
||||||
|
|
||||||
|
|
||||||
|
class Index(Daisy):
|
||||||
|
"""
|
||||||
|
A searchable index of records. This is currently only partially implemented,
|
||||||
|
but it works well enough to hold our remote Catch index
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
nodeNickname: str,
|
||||||
|
daisyCryptography,
|
||||||
|
prefill: list = [],
|
||||||
|
indexedFields: list = [],
|
||||||
|
autoIndex: bool = True,
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Attributes
|
||||||
|
----------
|
||||||
|
nodeNickname: str
|
||||||
|
Node nickname for record storage
|
||||||
|
|
||||||
|
daisyCryptography: Daisy.CryptographyUtil.SteelPetal
|
||||||
|
Record cryptography reference
|
||||||
|
|
||||||
|
prefill: list[dict]
|
||||||
|
List of objects to prefill the index with
|
||||||
|
|
||||||
|
indexedFields: list[str]
|
||||||
|
List of fields to index (what not to drop from a full record)
|
||||||
|
|
||||||
|
autoIndex: bool
|
||||||
|
Whether to automatically build the list of indexed fields present in the prefill objects
|
||||||
|
"""
|
||||||
|
if autoIndex:
|
||||||
|
if prefill != []:
|
||||||
|
if indexedFields == []:
|
||||||
|
for i in prefill:
|
||||||
|
# TODO: Value type annotation
|
||||||
|
# TODO: Value weighting
|
||||||
|
for k, v in i.items():
|
||||||
|
indexedFields.append(k)
|
||||||
|
indexedFields = list(set(indexedFields))
|
||||||
|
super().__init__(
|
||||||
|
f"{nodeNickname}/daisy/{nodeNickname}.index",
|
||||||
|
daisyCryptography,
|
||||||
|
prefillDict={"_index": prefill, "_fields": indexedFields},
|
||||||
|
)
|
||||||
|
|
||||||
|
def addEntry(self, entry: dict):
|
||||||
|
"""
|
||||||
|
Add a record to the index
|
||||||
|
|
||||||
|
entry: dict
|
||||||
|
Record to add to the index
|
||||||
|
"""
|
||||||
|
# TODO: Filter entry for only indexed fields
|
||||||
|
index = self.msg["_index"]
|
||||||
|
index.append(entry)
|
||||||
|
self.write(override={"_index": index})
|
||||||
|
|
||||||
|
def search(self, keydict: dict, strict: bool = True):
|
||||||
|
"""
|
||||||
|
Search the index for records with matching values
|
||||||
|
|
||||||
|
keydict: dict
|
||||||
|
Keys/Values to search for
|
||||||
|
|
||||||
|
strict: bool
|
||||||
|
Whether to require all keys/values match
|
||||||
|
"""
|
||||||
|
results = []
|
||||||
|
for ob in self.msg["_index"]:
|
||||||
|
if strict and type(ob) is not str:
|
||||||
|
addcheck = False
|
||||||
|
for k, v in keydict.items():
|
||||||
|
if k in ob.keys():
|
||||||
|
if v in ob[k]:
|
||||||
|
addcheck = True
|
||||||
|
else:
|
||||||
|
addcheck = False
|
||||||
|
break
|
||||||
|
if addcheck:
|
||||||
|
results.append(ob)
|
||||||
|
return results
|
|
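A standalone sketch of the strict matching rule used by the search above, faithful to the source: absent keys are skipped, a present key with a non-matching value rejects the entry, and at least one match is required. The example entries are illustrative.

```python
# Standalone sketch of the strict matching rule used by Index.search above.
def strict_match(entry: dict, keydict: dict) -> bool:
    matched = False
    for k, v in keydict.items():
        if k in entry:
            if v in entry[k]:
                matched = True       # at least one present key matches
            else:
                return False         # a present key with a non-matching value rejects the entry
    return matched


index = [
    {"head": "fish", "body": "swimfast", "remoteNode": "000100"},
    {"head": "bird", "body": "flyhigh", "remoteNode": "000200"},
]
assert [e for e in index if strict_match(e, {"head": "fish"})] == [index[0]]
```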
@ -0,0 +1,6 @@
|
||||||
|
Index
|
||||||
|
=====
|
||||||
|
|
||||||
|
.. autoclass:: Daisy.Index.Index
|
||||||
|
:members:
|
||||||
|
:undoc-members:
|
|
@ -0,0 +1,16 @@
|
||||||
|
from Daisy.Daisy import Daisy
|
||||||
|
|
||||||
|
|
||||||
|
class Ref(Daisy):
|
||||||
|
"""
|
||||||
|
Reference to a remote record
|
||||||
|
|
||||||
|
metadata: dict
|
||||||
|
Data to fill record with, should only be metadata
|
||||||
|
|
||||||
|
path: str
|
||||||
|
Where to store data locally
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, metadata: dict, path: str, remoteNodeID: str):
|
||||||
|
super().__init__(path, remote=remoteNodeID, prefillDict=metadata)
|
|
@ -0,0 +1,6 @@
|
||||||
|
Ref
|
||||||
|
===
|
||||||
|
|
||||||
|
.. autoclass:: Daisy.Ref.Ref
|
||||||
|
:members:
|
||||||
|
:undoc-members:
|
|
@ -1,49 +0,0 @@
|
||||||
from watchdog.observers import Observer
|
|
||||||
from watchdog.events import FileSystemEventHandler
|
|
||||||
|
|
||||||
global garden
|
|
||||||
"""
|
|
||||||
Map of instances to list of signals
|
|
||||||
to be processed
|
|
||||||
"""
|
|
||||||
garden = {}
|
|
||||||
|
|
||||||
|
|
||||||
class Compound(FileSystemEventHandler):
|
|
||||||
"""
|
|
||||||
File system watcher to propagate disk changes
|
|
||||||
|
|
||||||
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Daisy/Soil.py>`__
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, cache, isCatch: bool = False):
|
|
||||||
"""
|
|
||||||
Parameters
|
|
||||||
----------
|
|
||||||
cache: Cache
|
|
||||||
Daisy cache to update
|
|
||||||
|
|
||||||
isCatch: bool
|
|
||||||
Is the cache for catchs
|
|
||||||
"""
|
|
||||||
self.cache = cache
|
|
||||||
self.isCatch = isCatch
|
|
||||||
super().__init__()
|
|
||||||
|
|
||||||
def on_any_event(self, event):
|
|
||||||
"""
|
|
||||||
Called when a CRUD operation is performed on a record file
|
|
||||||
|
|
||||||
Parameters
|
|
||||||
----------
|
|
||||||
event
|
|
||||||
Event object provided by watchdog
|
|
||||||
"""
|
|
||||||
if not (".json" in event.src_path):
|
|
||||||
if not (".md" in event.src_path):
|
|
||||||
tpath = "/".join(event.src_path.split("/")[1:])
|
|
||||||
if tpath != "":
|
|
||||||
if self.isCatch:
|
|
||||||
self.cache.sget(tpath)
|
|
||||||
else:
|
|
||||||
self.cache.get(tpath).get()
|
|
|
@ -1,6 +0,0 @@
|
||||||
Soil: Daisy signal management
|
|
||||||
=============================
|
|
||||||
|
|
||||||
.. autoclass:: Daisy.Soil.Compound
|
|
||||||
:members:
|
|
||||||
:undoc-members:
|
|
|
@ -1,33 +1,118 @@
|
||||||
from Daisy.Daisy import Daisy
|
from Daisy.Daisy import Daisy
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
import logging
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
logger = logging.getLogger("__main__." + __name__)
|
||||||
|
|
||||||
|
|
||||||
class Store(Daisy):
|
class Store(Daisy):
|
||||||
"""
|
"""
|
||||||
Key value store
|
Key value store
|
||||||
|
|
||||||
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Daisy/Store.py>`__
|
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Daisy/Store.py>`_
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, store: str, path: str, nodeNickname: str):
|
Attributes
|
||||||
fpath = "daisy/{0}/{1}".format(path, nodeNickname)
|
----------
|
||||||
cpath = "{0}/{1}/{2}".format(path, nodeNickname, store)
|
|
||||||
|
ephemeral: dict
|
||||||
|
Memory only records
|
||||||
|
"""
|
||||||
|
def __init__(self, store: str, path: str, nodeNickname: str, daisyCryptography):
|
||||||
|
"""
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
store: str
|
||||||
|
Store name/filename
|
||||||
|
|
||||||
|
path: str
|
||||||
|
Folder record should be in
|
||||||
|
|
||||||
|
nodeNickname: str
|
||||||
|
Node nickname for record storage
|
||||||
|
|
||||||
|
daisyCryptography: Daisy.CryptographyUtil.SteelPetal
|
||||||
|
Record cryptography reference
|
||||||
|
"""
|
||||||
|
fpath = f"{nodeNickname}/daisy/{path}"
|
||||||
|
cpath = f"{fpath}/{store}"
|
||||||
if not os.path.exists(fpath):
|
if not os.path.exists(fpath):
|
||||||
os.mkdir(fpath)
|
os.mkdir(fpath)
|
||||||
super().__init__("daisy/" + cpath)
|
super().__init__(cpath, daisyCryptography)
|
||||||
|
self.ephemeral = {}
|
||||||
|
|
||||||
def update(self, entry: str, data, recur: bool = True):
|
def createEmpty(self, key: str):
|
||||||
|
"""
|
||||||
|
Create empty record at the given key
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
|
||||||
|
key: str
|
||||||
|
Key to create empty record at
|
||||||
|
"""
|
||||||
|
self.msg[key] = {}
|
||||||
|
|
||||||
|
def update(self, entry: str, data, recur: bool = True, write=True):
|
||||||
|
"""
|
||||||
|
Update given record
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
entry: str
|
||||||
|
Key of the record to update
|
||||||
|
|
||||||
|
data
|
||||||
|
Data to update record with
|
||||||
|
|
||||||
|
recur: bool
|
||||||
|
Whether to iterate over data
|
||||||
|
|
||||||
|
write: bool
|
||||||
|
Whether to write the record to disk; if False the update is kept in memory only (ephemeral)
|
||||||
|
"""
|
||||||
|
if write:
|
||||||
if recur:
|
if recur:
|
||||||
|
if entry not in self.msg.keys():
|
||||||
|
self.createEmpty(entry)
|
||||||
for key in data.keys():
|
for key in data.keys():
|
||||||
self.msg[entry][key] = data[key]
|
self.msg[entry][key] = data[key]
|
||||||
else:
|
else:
|
||||||
self.msg[entry] = data
|
self.msg[entry] = data
|
||||||
self.write()
|
self.write()
|
||||||
|
else:
|
||||||
|
if recur:
|
||||||
|
if entry not in self.ephemeral.keys():
|
||||||
|
self.ephemeral[entry] = {}
|
||||||
|
for key in data.keys():
|
||||||
|
self.ephemeral[entry][key] = data[key]
|
||||||
|
else:
|
||||||
|
self.ephemeral[entry] = data
|
||||||
|
|
||||||
def getRecord(self, key: str):
|
def getRecord(self, key: str, ephemeral=False):
|
||||||
|
"""
|
||||||
|
Get record at key
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
key: str
|
||||||
|
|
||||||
|
ephemeral: bool
|
||||||
|
Whether key is only in memory, used for session cryptography credentials currently
|
||||||
|
"""
|
||||||
|
logger.debug(key)
|
||||||
|
try:
|
||||||
|
if ephemeral:
|
||||||
|
if key in self.ephemeral.keys():
|
||||||
|
return self.ephemeral[key]
|
||||||
|
else:
|
||||||
|
logger.info("Record does not exist")
|
||||||
|
return False
|
||||||
|
else:
|
||||||
if key in self.get().keys():
|
if key in self.get().keys():
|
||||||
return self.get()[key]
|
return self.get()[key]
|
||||||
else:
|
else:
|
||||||
self.cLog(20, "Record does not exist")
|
logger.info("Record does not exist")
|
||||||
return False
|
return False
|
||||||
|
except Exception:
|
||||||
|
logger.warning(traceback.format_exc())
|
||||||
|
|
|
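A hypothetical usage sketch of the Store above, showing the persistent versus ephemeral update paths; the store name, node nickname, and record values are illustrative.

```python
# Hypothetical Store usage based on the update/getRecord methods above.
from Daisy.Store import Store
from Daisy.CryptographyUtil import SteelPetal

# Assumes the node00/daisy/ directory already exists (normally created during node setup)
keyStore = Store("key", "cryptography", "node00", SteelPetal("example passphrase"))

# Persistent update: merged into the record and written to disk
keyStore.update("000100", {"publicKey": "...PEM..."})

# Ephemeral update: kept only in memory (write=False), e.g. session secrets
keyStore.update("000100", {"sessionKey": b"\x00" * 32}, write=False)

keyStore.getRecord("000100")                  # -> {"publicKey": "...PEM..."}
keyStore.getRecord("000100", ephemeral=True)  # -> {"sessionKey": b"\x00" * 32}
```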
@ -30,7 +30,7 @@ class Header(Packet):
|
||||||
Whether a response should be sent when the message completes reception (TODO)
|
Whether a response should be sent when the message completes reception (TODO)
|
||||||
|
|
||||||
pAction: int
|
pAction: int
|
||||||
3 digit (maximum) pAction ID for mapping precise actions within a protocol (TODO)
|
3 digit (maximum) pAction ID for mapping precise actions within a protocol
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
|
@ -39,29 +39,49 @@ class Header(Packet):
|
||||||
packetCount: int,
|
packetCount: int,
|
||||||
sender: int,
|
sender: int,
|
||||||
senderDisplayName: int,
|
senderDisplayName: int,
|
||||||
|
sourceNode: int,
|
||||||
recipient: int,
|
recipient: int,
|
||||||
recipientNode: int,
|
recipientNode: int,
|
||||||
subpacket: bool = False,
|
|
||||||
wantFullResponse: bool = False,
|
wantFullResponse: bool = False,
|
||||||
packetsClass: int = 0,
|
packetsClass: int = 0,
|
||||||
pAction: int = -1,
|
pAction: int = -1,
|
||||||
|
target=True,
|
||||||
):
|
):
|
||||||
|
"""
|
||||||
|
Arguments
|
||||||
|
---------
|
||||||
|
sourceNode: int
|
||||||
|
Source of request
|
||||||
|
|
||||||
|
packetsClass: int
|
||||||
|
Integer ID matching the class of the message
|
||||||
|
|
||||||
|
target
|
||||||
|
Whether the message is being sent to a specific target and, if so, where
|
||||||
|
"""
|
||||||
super().__init__(
|
super().__init__(
|
||||||
"", packetsID=packetsID, packetCount=packetCount, packetsClass=packetsClass
|
b"", packetsID=packetsID, packetCount=packetCount, packetsClass=packetsClass
|
||||||
)
|
)
|
||||||
|
self.target = target
|
||||||
self.sender = sender
|
self.sender = sender
|
||||||
self.senderDisplayName = senderDisplayName
|
self.senderDisplayName = senderDisplayName
|
||||||
|
if target:
|
||||||
self.recipient = recipient
|
self.recipient = recipient
|
||||||
self.recipientNode = recipientNode
|
self.recipientNode = recipientNode
|
||||||
self.subpacket = subpacket
|
else:
|
||||||
|
self.recipient = -1
|
||||||
|
self.recipientNode = -1
|
||||||
|
self.submessages = []
|
||||||
self.wantFullResponse = wantFullResponse
|
self.wantFullResponse = wantFullResponse
|
||||||
self.pAction = pAction
|
self.pAction = pAction
|
||||||
|
self.sourceNode = sourceNode
|
||||||
|
self.packetCount = packetCount
|
||||||
|
|
||||||
def usePreset(self, path: str):
|
def usePreset(self, path: str, daisyCryptography):
|
||||||
"""
|
"""
|
||||||
Add preset fields to the packet
|
Add preset fields to the packet, currently unused
|
||||||
"""
|
"""
|
||||||
preset = Daisy(path)
|
preset = Daisy(path, daisyCryptography)
|
||||||
for key in preset.get().keys():
|
for key in preset.get().keys():
|
||||||
self.msg[key] = preset.get()[key]
|
self.msg[key] = preset.get()[key]
|
||||||
|
|
||||||
|
@ -72,11 +92,13 @@ class Header(Packet):
|
||||||
res = msgpack.loads(super().dump())
|
res = msgpack.loads(super().dump())
|
||||||
res["sender"] = self.sender
|
res["sender"] = self.sender
|
||||||
res["senderDisplayName"] = self.senderDisplayName
|
res["senderDisplayName"] = self.senderDisplayName
|
||||||
|
res["sourceNode"] = self.sourceNode
|
||||||
res["recipient"] = self.recipient
|
res["recipient"] = self.recipient
|
||||||
res["recipientNode"] = self.recipientNode
|
res["recipientNode"] = self.recipientNode
|
||||||
res["subpacket"] = self.subpacket
|
res["submessages"] = self.submessages
|
||||||
res["wantFullResponse"] = self.wantFullResponse
|
res["wantFullResponse"] = self.wantFullResponse
|
||||||
res["packetsClass"] = self.packetsClass
|
res["packetsClass"] = self.packetsClass
|
||||||
res["pAction"] = self.pAction
|
res["pAction"] = self.pAction
|
||||||
|
res["packetCount"] = self.packetCount
|
||||||
|
|
||||||
return msgpack.dumps(res)
|
return msgpack.dumps(res)
|
||||||
|
|
|
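Unpacked, the dump above yields a flat field dict alongside the base packet fields, roughly like the following; all values are illustrative.

```python
# Shape of an unpacked header per the dump() above; all values are illustrative.
import msgpack

headerFields = {
    "sender": 42,                # peer that originated the message
    "senderDisplayName": 7,
    "sourceNode": 100,           # node the request came from
    "recipient": 99,
    "recipientNode": 200,        # node the message is routed to
    "submessages": [],
    "wantFullResponse": False,
    "packetsClass": 2,           # protocol ID
    "pAction": 0,                # action within that protocol
    "packetCount": 4,
}

wire = msgpack.dumps(headerFields)           # bytes that ride in the header packet
assert msgpack.loads(wire) == headerFields   # lossless round trip
```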
@ -4,9 +4,15 @@ import lzma
|
||||||
import msgpack
|
import msgpack
|
||||||
import random
|
import random
|
||||||
import math
|
import math
|
||||||
|
import logging
|
||||||
|
|
||||||
# DO NOT CHANGE DATA SIZE UNLESS YOU KNOW WHAT YOU'RE DOING
|
# DO NOT CHANGE DATA SIZE UNLESS YOU KNOW WHAT YOU'RE DOING
|
||||||
|
|
||||||
|
logger = logging.getLogger("__main__." + __name__)
|
||||||
|
|
||||||
|
def dict2bytes(cdict: dict):
|
||||||
|
return lzma.compress(msgpack.dumps(cdict))
|
||||||
|
|
||||||
|
|
||||||
class Message:
|
class Message:
|
||||||
"""
|
"""
|
||||||
|
@ -25,12 +31,18 @@ class Message:
|
||||||
bytesObject: bytes,
|
bytesObject: bytes,
|
||||||
sender: int,
|
sender: int,
|
||||||
senderDisplayName: int,
|
senderDisplayName: int,
|
||||||
|
sourceNode,
|
||||||
recipient: int,
|
recipient: int,
|
||||||
recipientNode: int,
|
recipientNode: int,
|
||||||
cryptographyInfo,
|
cryptographyInfo,
|
||||||
|
packetsClass,
|
||||||
|
pAction,
|
||||||
dataSize: int = 128,
|
dataSize: int = 128,
|
||||||
wantFullResponse: bool = False,
|
wantFullResponse: bool = False,
|
||||||
packetsClass: int = 0,
|
target=True,
|
||||||
|
subMessage=False,
|
||||||
|
primaryMessage=None,
|
||||||
|
pskEncrypt=False
|
||||||
):
|
):
|
||||||
"""
|
"""
|
||||||
Parameters
|
Parameters
|
||||||
|
@ -44,21 +56,47 @@ class Message:
|
||||||
senderDisplayName: int
|
senderDisplayName: int
|
||||||
3 digit (maximum) ID for mapping display names to a given user
|
3 digit (maximum) ID for mapping display names to a given user
|
||||||
|
|
||||||
|
sourceNode: int
|
||||||
|
Source of request
|
||||||
|
|
||||||
recipient: int
|
recipient: int
|
||||||
6 digit (maximum) node or peer ID
|
6 digit (maximum) node or peer ID
|
||||||
|
|
||||||
recipientNode: int
|
recipientNode: int
|
||||||
6 digit (maximum) node ID to route the packet to
|
6 digit (maximum) node ID to route the packet to
|
||||||
|
|
||||||
|
cryptographyInfo: Cryptography.WhaleSong.Transport
|
||||||
|
Cryptography instance for encrypting message
|
||||||
|
|
||||||
|
packetsClass: int
|
||||||
|
Which protocol the packets are using
|
||||||
|
|
||||||
|
pAction: int
|
||||||
|
3 digit (maximum) pAction ID for mapping precise actions within a protocol
|
||||||
|
|
||||||
dataSize: int
|
dataSize: int
|
||||||
Size to cut the bytesObject into per packet
|
Size to cut the bytesObject into per packet
|
||||||
|
|
||||||
wantFullResponse: bool
|
wantFullResponse: bool
|
||||||
Whether to send a response when the message has completed reception (TODO: Kill all retries for associated packets when received)
|
Whether to send a response when the message has completed reception (TODO: Kill all retries for associated packets when received)
|
||||||
|
|
||||||
packetsClass: int
|
target
|
||||||
Which protocol the packets are using
|
Whether the message is being sent to a specific target and, if so, where
|
||||||
|
|
||||||
|
subMessage: bool
|
||||||
|
Whether this is a submessage
|
||||||
|
|
||||||
|
primaryMessage
|
||||||
|
Primary message this is a submessage to, if this is a submessage
|
||||||
|
|
||||||
|
pskEncrypt: bool
|
||||||
|
Whether to encrypt the message with the pre shared key
|
||||||
"""
|
"""
|
||||||
|
self.recipientNode = recipientNode
|
||||||
|
self.target = target
|
||||||
|
self.subMessage = subMessage
|
||||||
|
if subMessage:
|
||||||
|
self.primaryMessage = primaryMessage
|
||||||
if isinstance(bytesObject, list):
|
if isinstance(bytesObject, list):
|
||||||
packets = [h.Header(bytesObject[0])]
|
packets = [h.Header(bytesObject[0])]
|
||||||
for packet in bytesObject:
|
for packet in bytesObject:
|
||||||
|
@ -72,13 +110,18 @@ class Message:
|
||||||
)
|
)
|
||||||
self.packets = packets
|
self.packets = packets
|
||||||
else:
|
else:
|
||||||
# Data passed in by peers should already have been e2ee encrypted by SubtleCrypto
|
# TODO: Data passed in by peers should already have been e2ee encrypted by SubtleCrypto
|
||||||
# Transport encryption
|
if subMessage == False:
|
||||||
# bytesObject = lzma.compress(bytesObject, str(recipientNode).zfill(6), isDict=False)
|
bytesObject, nonce, tag = cryptographyInfo.encrypt(
|
||||||
bytesObject = cryptographyInfo.encrypt(bytesObject, self.no)
|
bytesObject, str(recipientNode).zfill(6), isDict=False, pskEncrypt=pskEncrypt
|
||||||
|
)
|
||||||
|
# logger.debug(bytesObject)
|
||||||
|
self.nonce = nonce
|
||||||
|
self.tag = tag
|
||||||
packets = []
|
packets = []
|
||||||
self.packetsID = random.randrange(0, 999999)
|
self.packetsID = random.randrange(0, 999999)
|
||||||
pnum = 1
|
pnum = 1
|
||||||
|
dataSize = 80
|
||||||
blen = math.ceil(len(bytesObject) / dataSize)
|
blen = math.ceil(len(bytesObject) / dataSize)
|
||||||
tb = b""
|
tb = b""
|
||||||
for it in range(blen):
|
for it in range(blen):
|
||||||
|
@ -86,11 +129,25 @@ class Message:
|
||||||
b = bytesObject[it * dataSize:]
|
b = bytesObject[it * dataSize:]
|
||||||
else:
|
else:
|
||||||
b = bytesObject[it * dataSize: (it * dataSize + dataSize)]
|
b = bytesObject[it * dataSize: (it * dataSize + dataSize)]
|
||||||
|
if subMessage:
|
||||||
|
packets.append(
|
||||||
|
p.Packet(
|
||||||
|
b,
|
||||||
|
self.packetsID,
|
||||||
|
pnum,
|
||||||
|
packetsClass=packetsClass,
|
||||||
|
primaryMessage=primaryMessage,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
else:
|
||||||
packets.append(
|
packets.append(
|
||||||
p.Packet(b, self.packetsID, pnum, packetsClass=packetsClass)
|
p.Packet(b, self.packetsID, pnum, packetsClass=packetsClass)
|
||||||
)
|
)
|
||||||
pnum += 1
|
pnum += 1
|
||||||
tb += b
|
tb += b
|
||||||
|
if subMessage:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
packets.insert(
|
packets.insert(
|
||||||
0,
|
0,
|
||||||
h.Header(
|
h.Header(
|
||||||
|
@ -98,18 +155,26 @@ class Message:
|
||||||
pnum,
|
pnum,
|
||||||
sender,
|
sender,
|
||||||
senderDisplayName,
|
senderDisplayName,
|
||||||
|
sourceNode,
|
||||||
recipient,
|
recipient,
|
||||||
recipientNode,
|
recipientNode,
|
||||||
wantFullResponse=wantFullResponse,
|
wantFullResponse=wantFullResponse,
|
||||||
packetsClass=packetsClass,
|
packetsClass=packetsClass,
|
||||||
|
pAction=pAction,
|
||||||
|
target=target,
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
self.fullPackets = [p for p in packets]
|
||||||
|
|
||||||
|
if subMessage:
|
||||||
|
pnum -= 1
|
||||||
|
|
||||||
for it in range(pnum):
|
for it in range(pnum):
|
||||||
packet = msgpack.loads(packets[it].dump())
|
packet = msgpack.loads(packets[it].dump())
|
||||||
packet["packetCount"] = pnum
|
packet["packetCount"] = pnum
|
||||||
|
|
||||||
packets[it] = msgpack.dumps(packet)
|
packets[it] = msgpack.dumps(packet)
|
||||||
|
|
||||||
self.packets = packets
|
self.packets = packets
|
||||||
|
|
||||||
def get(self) -> list[p.Packet]:
|
def get(self) -> list[p.Packet]:
|
||||||
|
@ -118,12 +183,47 @@ class Message:
|
||||||
"""
|
"""
|
||||||
return self.packets
|
return self.packets
|
||||||
|
|
||||||
def reassemble(self, completedMessage: dict):
|
def reassemble(self, completedMessage: dict, cryptographyInfo, subMessage=False, yctx=None, packetCount=None):
|
||||||
"""
|
"""
|
||||||
Reassemble packets from a completed message in `Sponge.base`
|
Reassemble packets from a completed message in `Sponge.base`; meant to be used without instantiation
|
||||||
|
|
||||||
|
Arguments
|
||||||
|
---------
|
||||||
|
completedMessage: dict
|
||||||
|
All parts of the message and submessage
|
||||||
|
|
||||||
|
cryptographyInfo: Cryptography.WhaleSong.Transport
|
||||||
|
Cryptography instance for encrypting message
|
||||||
|
|
||||||
|
subMessage: bool
|
||||||
|
Whether this is a submessage
|
||||||
|
|
||||||
|
yctx
|
||||||
|
Message parsing context
|
||||||
|
|
||||||
|
packetCount
|
||||||
|
Number of packets
|
||||||
"""
|
"""
|
||||||
data = b""
|
data = b""
|
||||||
for it in range(1, int(completedMessage["packetCount"])):
|
sourceNode = None
|
||||||
|
if subMessage:
|
||||||
|
sourceNode = yctx["sourceNode"]["val"]
|
||||||
|
for it in range(1, packetCount+1):
|
||||||
data += completedMessage["data"][completedMessage["dataOrder"].index(it)]
|
data += completedMessage["data"][completedMessage["dataOrder"].index(it)]
|
||||||
res = msgpack.loads(lzma.decompress(data))
|
data = msgpack.loads(lzma.decompress(data))
|
||||||
return res
|
# logger.debug(data)
|
||||||
|
# logger.debug(completedMessage["data"])
|
||||||
|
# logger.debug(completedMessage["dataOrder"])
|
||||||
|
else:
|
||||||
|
packetCount = int(completedMessage.yctx["packetCount"]["val"])
|
||||||
|
sourceNode = completedMessage.yctx["sourceNode"]["val"]
|
||||||
|
# logger.debug(completedMessage.data)
|
||||||
|
for it in range(1, packetCount):
|
||||||
|
if it in completedMessage.dataOrder:
|
||||||
|
data += completedMessage.data[completedMessage.dataOrder.index(it)]
|
||||||
|
# logger.debug("pre decrypt")
|
||||||
|
# logger.debug(data)
|
||||||
|
data = cryptographyInfo.decrypt(
|
||||||
|
data, sourceNode, completedMessage.nonce, completedMessage.tag
|
||||||
|
)
|
||||||
|
return data
|
||||||
|
|
|
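A standalone sketch of the chunking step above: the (possibly encrypted) payload is cut into dataSize-byte slices, and the slice count becomes the header's packetCount.

```python
# Standalone sketch of the chunking step used above.
import math


def chunk(bytesObject: bytes, dataSize: int = 80) -> list[bytes]:
    blen = math.ceil(len(bytesObject) / dataSize)
    return [bytesObject[i * dataSize:(i + 1) * dataSize] for i in range(blen)]


payload = bytes(200)
packets = chunk(payload)
assert len(packets) == 3              # 80 + 80 + 40 bytes
assert b"".join(packets) == payload   # reassembly restores the original bytes
```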
@ -0,0 +1 @@
|
||||||
|
# WARNING: DO NOT TRY TO POKE A BARNACLE
|
|
@ -0,0 +1,55 @@
|
||||||
|
from Packets.Message import Message
|
||||||
|
import Packets.Message
|
||||||
|
|
||||||
|
|
||||||
|
class Bubble(Message):
|
||||||
|
"""
|
||||||
|
Send data from peer to peer
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
sender,
|
||||||
|
senderID,
|
||||||
|
sourceNode,
|
||||||
|
recipient,
|
||||||
|
recipientNode,
|
||||||
|
cryptographyInfo,
|
||||||
|
data,
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
sender
|
||||||
|
6 digit (maximum) node or peer ID
|
||||||
|
|
||||||
|
senderID
|
||||||
|
3 digit (maximum) ID for mapping display names to a given user
|
||||||
|
|
||||||
|
sourceNode: int
|
||||||
|
Source of request
|
||||||
|
|
||||||
|
recipient
|
||||||
|
6 digit (maximum) recipient peer ID
|
||||||
|
|
||||||
|
recipientNode
|
||||||
|
6 digit (maximum) recipient node ID
|
||||||
|
|
||||||
|
cryptographyInfo: Cryptography.WhaleSong.Transport
|
||||||
|
Cryptography instance for encrypting message
|
||||||
|
|
||||||
|
data
|
||||||
|
Data to send to peer
|
||||||
|
"""
|
||||||
|
bytesOb = Packets.Message.dict2bytes({"data": data, "recipient": recipient, "target": "bubble"})
|
||||||
|
super().__init__(
|
||||||
|
bytesOb,
|
||||||
|
sender,
|
||||||
|
senderID,
|
||||||
|
sourceNode,
|
||||||
|
recipient,
|
||||||
|
recipientNode,
|
||||||
|
cryptographyInfo,
|
||||||
|
2,
|
||||||
|
0,
|
||||||
|
)
|
|
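A sketch of the payload framing Bubble uses above: the peer data is wrapped in a routing dict and passed through dict2bytes (msgpack + lzma, as defined in Packets/Message.py), then tagged with packetsClass=2, pAction=0 for the bubble protocol.

```python
# Sketch of the payload framing used by Bubble above; values are illustrative.
import lzma
import msgpack


def dict2bytes(cdict: dict) -> bytes:
    # Same serialization as Packets.Message.dict2bytes
    return lzma.compress(msgpack.dumps(cdict))


payload = dict2bytes({"data": "hi from a peer", "recipient": 42, "target": "bubble"})
assert msgpack.loads(lzma.decompress(payload))["target"] == "bubble"
```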
@ -0,0 +1,6 @@
|
||||||
|
bubble.Bubble
|
||||||
|
=============
|
||||||
|
|
||||||
|
.. autoclass:: Packets.Messages.Protocols.bubble.Bubble.Bubble
|
||||||
|
:members:
|
||||||
|
:undoc-members:
|
|
@ -0,0 +1,59 @@
|
||||||
|
from Packets.Message import Message
|
||||||
|
import Packets.Message
|
||||||
|
|
||||||
|
|
||||||
|
class IndexSync(Message):
|
||||||
|
"""
|
||||||
|
Sync indices of Catches across nodes
|
||||||
|
"""
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
sender,
|
||||||
|
senderID,
|
||||||
|
sourceNode,
|
||||||
|
recipient,
|
||||||
|
recipientNode,
|
||||||
|
cryptographyInfo,
|
||||||
|
index,
|
||||||
|
target=False
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
sender
|
||||||
|
6 digit (maximum) node or peer ID
|
||||||
|
|
||||||
|
senderID
|
||||||
|
3 digit (maximum) ID for mapping display names to a given user
|
||||||
|
|
||||||
|
sourceNode: int
|
||||||
|
Source of request
|
||||||
|
|
||||||
|
recipient
|
||||||
|
6 digit (maximum) recipient peer ID
|
||||||
|
|
||||||
|
recipientNode
|
||||||
|
6 digit (maximum) recipient node ID
|
||||||
|
|
||||||
|
cryptographyInfo: Cryptography.WhaleSong.Transport
|
||||||
|
Cryptography instance for encrypting message
|
||||||
|
|
||||||
|
index: dict
|
||||||
|
Index of Catches to sync across nodes
|
||||||
|
|
||||||
|
target: bool
|
||||||
|
Whether to send this to a specific target (str) or just broadcast (False)
|
||||||
|
"""
|
||||||
|
bytesOb = Packets.Message.dict2bytes({"index": index})
|
||||||
|
super().__init__(
|
||||||
|
bytesOb,
|
||||||
|
sender,
|
||||||
|
senderID,
|
||||||
|
sourceNode,
|
||||||
|
recipient,
|
||||||
|
recipientNode,
|
||||||
|
cryptographyInfo,
|
||||||
|
1,
|
||||||
|
2,
|
||||||
|
target=target
|
||||||
|
)
|
|
@ -0,0 +1,6 @@
|
||||||
|
catch.IndexSync
|
||||||
|
===============
|
||||||
|
|
||||||
|
.. autoclass:: Packets.Messages.Protocols.catch.IndexSync.IndexSync
|
||||||
|
:members:
|
||||||
|
:undoc-members:
|
|
@ -0,0 +1,68 @@
|
||||||
|
from Packets.Message import Message
|
||||||
|
import Packets.Message
|
||||||
|
|
||||||
|
|
||||||
|
class CatchRequest(Message):
|
||||||
|
"""
|
||||||
|
Request Catch (website) from another node
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
sender,
|
||||||
|
senderID,
|
||||||
|
sourceNode,
|
||||||
|
recipient,
|
||||||
|
recipientNode,
|
||||||
|
cryptographyInfo,
|
||||||
|
head,
|
||||||
|
body,
|
||||||
|
fins,
|
||||||
|
pskEncrypt=False
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
sender
|
||||||
|
6 digit (maximum) node or peer ID
|
||||||
|
|
||||||
|
senderID
|
||||||
|
3 digit (maximum) ID for mapping display names to a given user
|
||||||
|
|
||||||
|
sourceNode: int
|
||||||
|
Source of request
|
||||||
|
|
||||||
|
recipient
|
||||||
|
6 digit (maximum) recipient peer ID
|
||||||
|
|
||||||
|
recipientNode
|
||||||
|
6 digit (maximum) recipient node ID
|
||||||
|
|
||||||
|
cryptographyInfo: Cryptography.WhaleSong.Transport
|
||||||
|
Cryptography instance for encrypting message
|
||||||
|
|
||||||
|
head: str
|
||||||
|
First part of the Catch (4 characters max)
|
||||||
|
|
||||||
|
body: str
|
||||||
|
Second part of the Catch (8 characters max)
|
||||||
|
|
||||||
|
fins: list[str]
|
||||||
|
Last part of the Catch (6 characters max each)
|
||||||
|
|
||||||
|
pskEncrypt: bool
|
||||||
|
Whether to encrypt with PSK
|
||||||
|
"""
|
||||||
|
bytesOb = Packets.Message.dict2bytes({"head": head, "body": body, "fins": fins, "recipient": sender, "recipientNode": sourceNode})
|
||||||
|
super().__init__(
|
||||||
|
bytesOb,
|
||||||
|
sender,
|
||||||
|
senderID,
|
||||||
|
sourceNode,
|
||||||
|
recipient,
|
||||||
|
recipientNode,
|
||||||
|
cryptographyInfo,
|
||||||
|
1,
|
||||||
|
0,
|
||||||
|
pskEncrypt=pskEncrypt
|
||||||
|
)
|
|
@ -0,0 +1,6 @@
|
||||||
|
catch.Request
|
||||||
|
=============
|
||||||
|
|
||||||
|
.. autoclass:: Packets.Messages.Protocols.catch.Request.CatchRequest
|
||||||
|
:members:
|
||||||
|
:undoc-members:
|
|
@ -0,0 +1,60 @@
|
||||||
|
from Packets.Message import Message
|
||||||
|
import Packets.Message
|
||||||
|
|
||||||
|
|
||||||
|
class CatchResponse(Message):
|
||||||
|
"""
|
||||||
|
Send the local Catch (website) to the user who requested it
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
sender,
|
||||||
|
senderID,
|
||||||
|
sourceNode,
|
||||||
|
recipient,
|
||||||
|
recipientNode,
|
||||||
|
cryptographyInfo,
|
||||||
|
html,
|
||||||
|
pskEncrypt=False
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
sender
|
||||||
|
6 digit (maximum) node or peer ID
|
||||||
|
|
||||||
|
senderID
|
||||||
|
3 digit (maximum) ID for mapping display names to a given user
|
||||||
|
|
||||||
|
sourceNode: int
|
||||||
|
Source of request
|
||||||
|
|
||||||
|
recipient
|
||||||
|
6 digit (maximum) recipient peer ID
|
||||||
|
|
||||||
|
recipientNode
|
||||||
|
6 digit (maximum) recipient node ID
|
||||||
|
|
||||||
|
cryptographyInfo: Cryptography.WhaleSong.Transport
|
||||||
|
Cryptography instance for encrypting message
|
||||||
|
|
||||||
|
html
|
||||||
|
Contents of Catch to send back
|
||||||
|
|
||||||
|
pskEncrypt: bool
|
||||||
|
Whether to encrypt with PSK
|
||||||
|
"""
|
||||||
|
bytesOb = Packets.Message.dict2bytes({"html": html, "recipient": recipient, "target": "catch"})
|
||||||
|
super().__init__(
|
||||||
|
bytesOb,
|
||||||
|
sender,
|
||||||
|
senderID,
|
||||||
|
sourceNode,
|
||||||
|
recipient,
|
||||||
|
recipientNode,
|
||||||
|
cryptographyInfo,
|
||||||
|
1,
|
||||||
|
1,
|
||||||
|
pskEncrypt=pskEncrypt
|
||||||
|
)
|
|
@ -0,0 +1,6 @@
|
||||||
|
catch.Response
|
||||||
|
==============
|
||||||
|
|
||||||
|
.. autoclass:: Packets.Messages.Protocols.catch.Response.CatchResponse
|
||||||
|
:members:
|
||||||
|
:undoc-members:
|
|
@ -0,0 +1,71 @@
|
||||||
|
from Packets.Message import Message
|
||||||
|
import Packets.Message
|
||||||
|
|
||||||
|
|
||||||
|
class Handshake(Message):
|
||||||
|
"""
|
||||||
|
Provides the ephemeral key for session encryption
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self, sender, senderID, recipient, recipientNode, cryptographyInfo, onodeID, sourceNode
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
sender
|
||||||
|
6 digit (maximum) node or peer ID
|
||||||
|
|
||||||
|
senderID
|
||||||
|
3 digit (maximum) ID for mapping display names to a given user
|
||||||
|
|
||||||
|
sourceNode: int
|
||||||
|
Source of request
|
||||||
|
|
||||||
|
recipient
|
||||||
|
6 digit (maximum) recipient peer ID
|
||||||
|
|
||||||
|
recipientNode
|
||||||
|
6 digit (maximum) recipient node ID
|
||||||
|
|
||||||
|
cryptographyInfo: Cryptography.WhaleSong.Transport
|
||||||
|
Cryptography instance for encrypting message
|
||||||
|
|
||||||
|
onodeID
|
||||||
|
Node identifier for current node
|
||||||
|
|
||||||
|
sourceNode
|
||||||
|
Source of request
|
||||||
|
"""
|
||||||
|
ephemeralKey = None
|
||||||
|
record = cryptographyInfo.getRecord("key", "self")
|
||||||
|
if record:
|
||||||
|
if "ourEphemeralKey" in record.keys():
|
||||||
|
ephemeralKey = record["ourEphemeralKey"]
|
||||||
|
else:
|
||||||
|
cryptographyInfo.genOurEphemeralKey(onodeID)
|
||||||
|
record = cryptographyInfo.getRecord("key", onodeID, ephemeral=True)
|
||||||
|
ephemeralKey = record["ourEphemeralKey"].export_key(
|
||||||
|
format="PEM",
|
||||||
|
prot_params={"iteration_count": 131072}
|
||||||
|
)
|
||||||
|
if "staticKey" not in record.keys():
|
||||||
|
cryptographyInfo.genStaticKey(onodeID)
|
||||||
|
else:
|
||||||
|
raise Exception("Node does not exist")
|
||||||
|
|
||||||
|
bytesOb = Packets.Message.dict2bytes(
|
||||||
|
{"ephemeralKey": ephemeralKey}
|
||||||
|
)
|
||||||
|
super().__init__(
|
||||||
|
bytesOb,
|
||||||
|
sender,
|
||||||
|
senderID,
|
||||||
|
sourceNode,
|
||||||
|
recipient,
|
||||||
|
recipientNode,
|
||||||
|
cryptographyInfo,
|
||||||
|
3,
|
||||||
|
0,
|
||||||
|
pskEncrypt=True
|
||||||
|
)
|
|
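A hedged usage sketch for Handshake; note that it is always PSK-encrypted (pskEncrypt=True above) and raises if no "self" key record exists yet in the Transport instance. Passing the local node ID for both onodeID and sourceNode is an assumption made for illustration, as are all the argument names:

```python
from Packets.Messages.Protocols.cryptography.Handshake import Handshake

def buildHandshake(crypto, ourPeerID, ourShortID, targetPeer, targetNode, ourNodeID):
    # crypto: existing Cryptography.WhaleSong.Transport with a "self" key
    # record already present; otherwise the constructor raises.
    return Handshake(
        ourPeerID,    # sender
        ourShortID,   # senderID
        targetPeer,   # recipient: peer to establish a session with
        targetNode,   # recipientNode
        crypto,       # cryptographyInfo
        ourNodeID,    # onodeID: this node's identifier
        ourNodeID,    # sourceNode: assumed to be the same node here
    )
```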
@ -0,0 +1,6 @@
cryptography.Handshake
======================

.. autoclass:: Packets.Messages.Protocols.cryptography.Handshake.Handshake
   :members:
   :undoc-members:
@ -0,0 +1,64 @@
from Packets.Message import Message
import Packets.Message


class HopperRequest(Message):
    """
    Proxy request to the main internet from a remote node
    """

    def __init__(
        self,
        sender,
        senderID,
        sourceNode,
        recipient,
        recipientNode,
        url: str,
        params: dict,
        method: str,
        cryptographyInfo,
    ):
        """
        Parameters
        ----------
        sender
            6 digit (maximum) node or peer ID

        senderID
            3 digit (maximum) ID for mapping display names to a given user

        sourceNode: int
            Source of request

        recipient
            6 digit (maximum) recipient peer ID

        recipientNode
            6 digit (maximum) recipient node ID

        url: str
            URL to fetch

        params: dict
            Parameters to add to the request for the URL

        method: str
            Method to use for the request (currently GET/POST)

        cryptographyInfo: Cryptography.WhaleSong.Transport
            Cryptography instance for encrypting message
        """
        bytesOb = Packets.Message.dict2bytes({"url": url, "parameters": params, "method": method, "recipient": sender, "recipientNode": sourceNode})

        super().__init__(
            bytesOb,
            sender,
            senderID,
            sourceNode,
            recipient,
            recipientNode,
            cryptographyInfo,
            5,
            0,
        )
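A construction sketch for HopperRequest; the URL, parameters, and IDs are placeholders, and `crypto` stands in for an existing Transport instance (all names here are assumptions for illustration):

```python
from Packets.Messages.Protocols.hopper.Request import HopperRequest

def buildHopperRequest(crypto, ourPeerID, ourShortID, ourNodeID, bridgePeer, bridgeNode):
    # Ask a bridging node to fetch a URL on this node's behalf.
    return HopperRequest(
        ourPeerID,              # sender
        ourShortID,             # senderID
        ourNodeID,              # sourceNode (echoed into the payload so the bridge can reply)
        bridgePeer,             # recipient: peer that will perform the fetch
        bridgeNode,             # recipientNode
        "https://example.com",  # url (placeholder)
        {"q": "piermesh"},      # params forwarded with the request (placeholder)
        "GET",                  # method (GET/POST per the docstring)
        crypto,                 # cryptographyInfo
    )
```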
@ -0,0 +1,6 @@
hopper.Request
==============

.. autoclass:: Packets.Messages.Protocols.hopper.Request.HopperRequest
   :members:
   :undoc-members:
@ -0,0 +1,49 @@
from Packets.Message import Message
import Packets.Message


class HopperResponse(Message):
    """
    Send the response from a proxied request back to the requester
    """

    def __init__(
        self, sender, senderID, sourceNode, recipient, recipientNode, response, cryptographyInfo
    ):
        """
        Parameters
        ----------
        sender
            6 digit (maximum) node or peer ID

        senderID
            3 digit (maximum) ID for mapping display names to a given user

        sourceNode: int
            Source of request

        recipient
            6 digit (maximum) recipient peer ID

        recipientNode
            6 digit (maximum) recipient node ID

        response
            Data from the proxied request

        cryptographyInfo: Cryptography.WhaleSong.Transport
            Cryptography instance for encrypting message
        """
        bytesOb = Packets.Message.dict2bytes({"res": response, "recipient": recipient, "target": "hopper"})

        super().__init__(
            bytesOb,
            sender,
            senderID,
            sourceNode,
            recipient,
            recipientNode,
            cryptographyInfo,
            5,
            1
        )
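On the bridging side, the reply can be addressed straight from the HopperRequest payload. A sketch, with `payload` as the decoded request dict and `result` as whatever the proxied fetch returned; both names, and the helper itself, are assumptions:

```python
from Packets.Messages.Protocols.hopper.Response import HopperResponse

def buildHopperResponse(crypto, payload, result, ourPeerID, ourShortID, ourNodeID):
    # payload: decoded HopperRequest payload on the bridging node (assumed)
    # result: data returned by the proxied fetch (assumed)
    return HopperResponse(
        ourPeerID,                 # sender
        ourShortID,                # senderID
        ourNodeID,                 # sourceNode
        payload["recipient"],      # recipient: the original requester
        payload["recipientNode"],  # recipientNode: the requester's node
        result,                    # response: data from the proxied request
        crypto,                    # cryptographyInfo
    )
```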
@ -0,0 +1,6 @@
hopper.Response
===============

.. autoclass:: Packets.Messages.Protocols.hopper.Response.HopperResponse
   :members:
   :undoc-members:
@ -0,0 +1,57 @@
from Packets.Message import Message
import Packets.Message
import logging


logger = logging.getLogger("__main__." + __name__)


class AnnounceMessage(Message):
    """
    Announce the network map details and public key of the node for discovery
    """

    def __init__(
        self,
        sender,
        senderID,
        sourceNode,
        cryptographyInfo,
        mapping,
    ):
        """
        Parameters
        ----------
        sender
            6 digit (maximum) node or peer ID

        senderID
            3 digit (maximum) ID for mapping display names to a given user

        sourceNode: int
            Source of request

        cryptographyInfo: Cryptography.WhaleSong.Transport
            Cryptography instance for encrypting message

        mapping: dict
            Network map
        """
        mapping["publicKey"] = cryptographyInfo.getRecord("key", "self")["publicKey"]
        recipient = -1
        recipientNode = -1
        bytesOb = Packets.Message.dict2bytes(mapping)
        # logger.debug(10, "Mapping bytes")
        # logger.debug(10, bytesOb)

        super().__init__(
            bytesOb,
            sender,
            senderID,
            sourceNode,
            recipient,
            recipientNode,
            cryptographyInfo,
            0,
            0,
            target=False,
        )
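Across the messages added in this changeset, the two positional integers passed to Message.__init__ after cryptographyInfo look like a protocol/subtype pair. A reference summary of the values used above; the labels "protocol" and "subtype" are this summary's own wording rather than names from the code, and the first entry assumes the partially shown request class is CatchRequest per the doc stub that follows it:

```python
# Summary of the (protocol, subtype) style integers used by the new messages.
# Keys are the class names; the tuple meaning is an inference from usage.
MESSAGE_IDS = {
    "CatchRequest":    (1, 0),
    "CatchResponse":   (1, 1),
    "Handshake":       (3, 0),  # always PSK-encrypted
    "HopperRequest":   (5, 0),
    "HopperResponse":  (5, 1),
    "AnnounceMessage": (0, 0),  # broadcast: recipient/recipientNode are -1
}
```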