Merge pull request 'nightly' (#73) from nightly into main
Reviewed-on: #73
|
@ -2,9 +2,9 @@ bin/
|
||||||
lib*
|
lib*
|
||||||
share/
|
share/
|
||||||
**/__pycache__/
|
**/__pycache__/
|
||||||
src/webui/build/
|
src/Splash/build/
|
||||||
src/webui/htmx-extensions/
|
src/Splash/htmx-extensions/
|
||||||
src/webui/res/js/node_modules/
|
src/Splash/res/js/node_modules/
|
||||||
src/daisy/
|
src/daisy/
|
||||||
src/catch/
|
src/catch/
|
||||||
src/logs/
|
src/logs/
|
||||||
|
|
|
@ -6,4 +6,4 @@ mv docs.tmp/* docs
|
||||||
rm -rf docs.tmp
|
rm -rf docs.tmp
|
||||||
mv docs/index.md docs/readme.md
|
mv docs/index.md docs/readme.md
|
||||||
sed -i '1s;^;![PierMesh logo](https://git.utopic.work/PierMesh/piermesh/raw/branch/main/piermeshicon.png)\n\n;' docs/readme.md
|
sed -i '1s;^;![PierMesh logo](https://git.utopic.work/PierMesh/piermesh/raw/branch/main/piermeshicon.png)\n\n;' docs/readme.md
|
||||||
sed -i '1s;^;![Daisy logo](https://git.utopic.work/PierMesh/piermesh/raw/branch/main/imgs/daisydisplay.png)\n\n;' docs/Components/daisy.md
|
sed -i '1s;^;![Daisy logo](https://git.utopic.work/PierMesh/piermesh/raw/branch/main/imgs/daisydisplay.png)\n\n;' docs/Daisy/Daisy.md
|
||||||
|
|
|
@ -0,0 +1,117 @@
|
||||||
|
Diffie hellman ephemeral
|
||||||
|
Fernet based encryption
|
||||||
|
==========================
|
||||||
|
|
||||||
|
### *class* Cryptography.WhaleSong.DHEFern(cache, nodeNickname, cLog)
|
||||||
|
|
||||||
|
#### cLog
|
||||||
|
|
||||||
|
Method reference to run.Node.cLog so we can log to the ui from here
|
||||||
|
|
||||||
|
#### loadedParams
|
||||||
|
|
||||||
|
In memory representations of cryptography parameters
|
||||||
|
|
||||||
|
* **Type:**
|
||||||
|
dict
|
||||||
|
|
||||||
|
#### loadedKeys
|
||||||
|
|
||||||
|
In memory representations of cryptography keys
|
||||||
|
|
||||||
|
* **Type:**
|
||||||
|
dict
|
||||||
|
|
||||||
|
#### nodeNickname
|
||||||
|
|
||||||
|
Name of node for isolating configs when running multiple nodes
|
||||||
|
|
||||||
|
* **Type:**
|
||||||
|
str
|
||||||
|
|
||||||
|
#### cache
|
||||||
|
|
||||||
|
Daisy cache for use in storing cryptography information
|
||||||
|
|
||||||
|
* **Type:**
|
||||||
|
Components.daisy.Cache
|
||||||
|
|
||||||
|
#### publicKey
|
||||||
|
|
||||||
|
Public key for node
|
||||||
|
|
||||||
|
#### privateKey
|
||||||
|
|
||||||
|
Private key for node
|
||||||
|
|
||||||
|
#### checkInMem(store: str, nodeID: str)
|
||||||
|
|
||||||
|
Check if parameters or keys are loaded for node of nodeID
|
||||||
|
|
||||||
|
* **Parameters:**
|
||||||
|
**store** (*str*) – Whether to check loaded keys or parameters
|
||||||
|
|
||||||
|
#### decrypt(data, nodeID: str)
|
||||||
|
|
||||||
|
Decrypt bytes and return either str or dict (TODO: Check whether to msgpack load)
|
||||||
|
|
||||||
|
#### encrypt(data, nodeID: str, isDict: bool = True)
|
||||||
|
|
||||||
|
Do Fernet encryption
|
||||||
|
|
||||||
|
data
|
||||||
|
: Either bytes or dict to encrypt
|
||||||
|
|
||||||
|
isDict: bool
|
||||||
|
: Whether data is a dictionary
|
||||||
|
|
||||||
|
#### genKeyPair(paramsOverride=False, setSelf: bool = True)
|
||||||
|
|
||||||
|
Generate public and private keys from self.params (TODO: Gen from passed params)
|
||||||
|
|
||||||
|
paramsOverride
|
||||||
|
: False or parameters to use (TODO)
|
||||||
|
|
||||||
|
setSelf: bool
|
||||||
|
: Whether to set self.privateKey and self.publicKey
|
||||||
|
|
||||||
|
#### genParams()
|
||||||
|
|
||||||
|
Generate Diffie Hellman parameters
|
||||||
|
|
||||||
|
#### getParamsBytes()
|
||||||
|
|
||||||
|
Get bytes encoded from self.parameters (TODO: Encode from store)
|
||||||
|
|
||||||
|
#### getRecord(store: str, key: str)
|
||||||
|
|
||||||
|
Get record from store: store with key: key
|
||||||
|
|
||||||
|
#### getSalt()
|
||||||
|
|
||||||
|
Get random salt
|
||||||
|
|
||||||
|
#### initStore(store: str)
|
||||||
|
|
||||||
|
Initialize store: store
|
||||||
|
|
||||||
|
#### keyDerive(pubKey: bytes, salt: bytes, nodeID: str, params: bytes)
|
||||||
|
|
||||||
|
Derive shared key using Diffie Hellman
|
||||||
|
|
||||||
|
pubKey: bytes
|
||||||
|
: Public key
|
||||||
|
|
||||||
|
nodeID: str
|
||||||
|
: PierMesh node ID
|
||||||
|
|
||||||
|
params: bytes
|
||||||
|
: Encryption parameters
|
||||||
|
|
||||||
|
#### loadParamBytes(pemBytes: bytes)
|
||||||
|
|
||||||
|
Load parameters to self.params from given bytes (TODO: Load from store)
|
||||||
|
|
||||||
|
#### loadRecordToMem(store: str, nodeID: str)
|
||||||
|
|
||||||
|
Load record of nodeID from store to either keys or parameters
|
|
@ -0,0 +1,34 @@
|
||||||
|
# Daisy based cache
|
||||||
|
|
||||||
|
### *class* Daisy.Cache.Cache(filepaths=None, cacheFile=None, path: str = 'daisy', walk: bool = False, isCatch: bool = False)
|
||||||
|
|
||||||
|
In memory collection of Daisy records
|
||||||
|
|
||||||
|
#### create(path: str, data: dict)
|
||||||
|
|
||||||
|
Create new record
|
||||||
|
|
||||||
|
* **Parameters:**
|
||||||
|
* **path** (*str*) – Path to create record at
|
||||||
|
* **data** (*dict*) – Data to populate record with
|
||||||
|
|
||||||
|
#### get(path: str)
|
||||||
|
|
||||||
|
Get record at path, else return False
|
||||||
|
|
||||||
|
path: str
|
||||||
|
: Path of record
|
||||||
|
|
||||||
|
#### refresh()
|
||||||
|
|
||||||
|
Reload from disk to memory
|
||||||
|
|
||||||
|
#### search(keydict: dict, strict: bool = True)
|
||||||
|
|
||||||
|
Search cache for record for records with values
|
||||||
|
|
||||||
|
keydict: dict
|
||||||
|
: Values to search for
|
||||||
|
|
||||||
|
strict: bool
|
||||||
|
: Whether to require values match
|
|
@ -0,0 +1,22 @@
|
||||||
|
Daisy cache for catchs,
|
||||||
|
PierMesh’s domain analog
|
||||||
|
==========================
|
||||||
|
|
||||||
|
### *class* Daisy.Catch.Catch(path: str = 'catch', filepaths=None, catchFile=None, walk: bool = False)
|
||||||
|
|
||||||
|
Sub class of Cache for handling catchs
|
||||||
|
|
||||||
|
![image](https://git.utopic.work/PierMesh/piermesh/raw/branch/main/imgs/catchdisplay.png)
|
||||||
|
|
||||||
|
#### get(head: str, tail: str, fins=None)
|
||||||
|
|
||||||
|
Get catch by pieces
|
||||||
|
|
||||||
|
* **Parameters:**
|
||||||
|
* **head** (*str*) – First part of catch (maximum: 4 characters)
|
||||||
|
* **tail** (*str*) – Second part of catch (maximum: 16 characters)
|
||||||
|
* **fins** – List of (maximum 8 characters) strings at the end of the catch or None if none
|
||||||
|
|
||||||
|
#### sget(path: str)
|
||||||
|
|
||||||
|
Call Cache’s get to get record
|
|
@ -0,0 +1,56 @@
|
||||||
|
![Daisy logo](https://git.utopic.work/PierMesh/piermesh/raw/branch/main/imgs/daisydisplay.png)
|
||||||
|
|
||||||
|
Schemaless binary database
|
||||||
|
base class
|
||||||
|
==========================
|
||||||
|
|
||||||
|
### *class* Daisy.Daisy.Daisy(filepath: str, templates: dict = {}, template: bool = False, prefillDict: bool = False)
|
||||||
|
|
||||||
|
Base class for Daisy data representation
|
||||||
|
|
||||||
|
[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Components/daisy.py)
|
||||||
|
|
||||||
|
#### filepath
|
||||||
|
|
||||||
|
Path to file representation on disk
|
||||||
|
|
||||||
|
* **Type:**
|
||||||
|
str
|
||||||
|
|
||||||
|
#### msg
|
||||||
|
|
||||||
|
In memory representation
|
||||||
|
|
||||||
|
* **Type:**
|
||||||
|
dict
|
||||||
|
|
||||||
|
#### get()
|
||||||
|
|
||||||
|
Get record dictionary from memory
|
||||||
|
|
||||||
|
* **Returns:**
|
||||||
|
**self.msg**
|
||||||
|
* **Return type:**
|
||||||
|
dict
|
||||||
|
|
||||||
|
#### read(decrypt: bool = False, decryptKey=False)
|
||||||
|
|
||||||
|
Read record from disk to memory
|
||||||
|
|
||||||
|
* **Parameters:**
|
||||||
|
* **decrypt** (*bool*) – Whether to decrypt record
|
||||||
|
* **decryptKey** – Key to decrypt record
|
||||||
|
|
||||||
|
#### sublist()
|
||||||
|
|
||||||
|
Lists contents of directory if object is a directory, otherwise return None
|
||||||
|
|
||||||
|
#### write(override=False, encrypt: bool = False, encryptKey=None, recur: bool = False)
|
||||||
|
|
||||||
|
Write record to disk
|
||||||
|
|
||||||
|
* **Parameters:**
|
||||||
|
* **override** – Either false or a dictionary of values to set on the record
|
||||||
|
* **encrypt** (*bool*) – Whether to encrypt the record (TODO)
|
||||||
|
* **encryptKey** – Key to encrypt record with, or None if not set
|
||||||
|
* **recur** (*bool*) – Whether to recursively handle keys
|
|
@ -0,0 +1,12 @@
|
||||||
|
# Daisy signal management
|
||||||
|
|
||||||
|
### *class* Daisy.Soil.Compound(cache, isCatch: bool = False)
|
||||||
|
|
||||||
|
File system watcher to propagate disk changes
|
||||||
|
|
||||||
|
#### on_any_event(event)
|
||||||
|
|
||||||
|
Called when a CRUD operation is performed on a record file
|
||||||
|
|
||||||
|
* **Parameters:**
|
||||||
|
**event** – Event object provided by watchdog
|
|
@ -0,0 +1,5 @@
|
||||||
|
Daisy based key value
|
||||||
|
store with recursion
|
||||||
|
==========================
|
||||||
|
|
||||||
|
### *class* Daisy.Store.Store(store: str, path: str, nodeNickname: str)
|
|
@ -1,3 +1,7 @@
|
||||||
SubPackets for handling
|
SubPackets for handling
|
||||||
full submessages
|
full submessages
|
||||||
=======================
|
=======================
|
||||||
|
|
||||||
|
### *class* Packets.SubMessage.SubMessage
|
||||||
|
|
||||||
|
TODO
|
|
@ -1,3 +1,7 @@
|
||||||
SubPacket for handling
|
SubPacket for handling
|
||||||
individual packets of submessages
|
individual packets of submessages
|
||||||
=================================
|
=================================
|
||||||
|
|
||||||
|
### *class* Packets.SubPacket.SubPacket
|
||||||
|
|
||||||
|
TODO
|
||||||
|
|
|
@ -0,0 +1,52 @@
|
||||||
|
# serve: Web UI server
|
||||||
|
|
||||||
|
### *class* Splash.serve.Server(transceiver, catch, onodeID, network, cLog)
|
||||||
|
|
||||||
|
Web server that serves the web ui and provides web to node communication
|
||||||
|
|
||||||
|
#### cLog
|
||||||
|
|
||||||
|
Reference to run.Node.cLog for logging
|
||||||
|
|
||||||
|
#### transmitter
|
||||||
|
|
||||||
|
Reference to our Transmission.transmission.Transmitter instance
|
||||||
|
|
||||||
|
* **Type:**
|
||||||
|
Transmission.transmission.Transmitter
|
||||||
|
|
||||||
|
#### network
|
||||||
|
|
||||||
|
Reference to our Siph.Network.Network
|
||||||
|
|
||||||
|
* **Type:**
|
||||||
|
Siph.Network.Network
|
||||||
|
|
||||||
|
#### nodeID
|
||||||
|
|
||||||
|
String converted PierMesh node ID
|
||||||
|
|
||||||
|
* **Type:**
|
||||||
|
str
|
||||||
|
|
||||||
|
#### peerIDs
|
||||||
|
|
||||||
|
Map of peer IDs to Websocket sessions
|
||||||
|
|
||||||
|
* **Type:**
|
||||||
|
dict
|
||||||
|
|
||||||
|
#### app
|
||||||
|
|
||||||
|
Microdot server instance
|
||||||
|
|
||||||
|
#### catch
|
||||||
|
|
||||||
|
Reference to our Catch Cache instance to pull from for serving Catchs
|
||||||
|
|
||||||
|
* **Type:**
|
||||||
|
[Daisy.Catch.Catch](/PierMesh/piermesh/src/branch/main/docs/Daisy/Catch.md#Daisy.Catch.Catch)
|
||||||
|
|
||||||
|
#### *async* sendToPeer(peerID: str, data: str)
|
||||||
|
|
||||||
|
Send data to Websocket of peer with peerID
|
|
@ -6,6 +6,8 @@ Dispatches to Protocols
|
||||||
|
|
||||||
Packet filtering orchestration
|
Packet filtering orchestration
|
||||||
|
|
||||||
|
[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/base.py)
|
||||||
|
|
||||||
cLog
|
cLog
|
||||||
: Reference to run.Node.cLog for logging
|
: Reference to run.Node.cLog for logging
|
||||||
|
|
||||||
|
@ -45,18 +47,26 @@ Check if this is a self packet, if so skip
|
||||||
|
|
||||||
Base filtering logic, takes a single MeshTastic packet
|
Base filtering logic, takes a single MeshTastic packet
|
||||||
|
|
||||||
### *class* Sponge.Protocols.bubble.filter(completeMessage, recipient, recipientNode, onodeID, todo)
|
#### *async* bubble.filter(recipient, recipientNode, onodeID, todo)
|
||||||
|
|
||||||
Peer to peer protocol
|
Peer to peer protocol
|
||||||
|
|
||||||
### *class* Sponge.Protocols.catch.filter(completeMessage, recipient, recipientNode, todo)
|
[🔗 Bubble Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/src/Sponge/Protocols/bubble.py)
|
||||||
|
|
||||||
|
#### *async* catch.filter(recipient, recipientNode, todo)
|
||||||
|
|
||||||
Catch exchange protocol
|
Catch exchange protocol
|
||||||
|
|
||||||
### *class* Sponge.Protocols.cryptography.filter(completeMessage, recipientNode, todo)
|
[🔗 Catch Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/catch.py)
|
||||||
|
|
||||||
|
#### *async* cryptography.filter(recipientNode, todo)
|
||||||
|
|
||||||
Cryptographic operations protocol
|
Cryptographic operations protocol
|
||||||
|
|
||||||
### *class* Sponge.Protocols.map.filter(completeMessage, todo)
|
[🔗 Cryptography Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/cryptography.py)
|
||||||
|
|
||||||
|
#### *async* map.filter(todo)
|
||||||
|
|
||||||
Network mapping protocol
|
Network mapping protocol
|
||||||
|
|
||||||
|
[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/map.py)
|
||||||
|
|
|
@ -0,0 +1,123 @@
|
||||||
|
# Layer 0 data transceiving
|
||||||
|
|
||||||
|
### *class* Transceiver.Transceiver.Transceiver(device, filter, onodeID, cache, catch, cryptographyInfo, cLog)
|
||||||
|
|
||||||
|
Handling LoRa transceiving
|
||||||
|
|
||||||
|
#### cLog
|
||||||
|
|
||||||
|
Reference to run.Node.cLog for logging
|
||||||
|
|
||||||
|
#### cryptographyInfo
|
||||||
|
|
||||||
|
Cryptography instance for encrypting transmissions
|
||||||
|
|
||||||
|
* **Type:**
|
||||||
|
[Cryptography.WhaleSong.DHEFern](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern)
|
||||||
|
|
||||||
|
#### filter
|
||||||
|
|
||||||
|
Sponge.base.Filter instance for filtering packets
|
||||||
|
|
||||||
|
* **Type:**
|
||||||
|
[Sponge.base.Filter](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter)
|
||||||
|
|
||||||
|
#### tcache
|
||||||
|
|
||||||
|
Data backend Daisy Cache
|
||||||
|
|
||||||
|
* **Type:**
|
||||||
|
[Daisy.Cache.Cache](/PierMesh/piermesh/src/branch/main/docs/Daisy/Cache.md#Daisy.Cache.Cache)
|
||||||
|
|
||||||
|
#### tcatch
|
||||||
|
|
||||||
|
Daisy Catch Cache for Catch operations
|
||||||
|
|
||||||
|
* **Type:**
|
||||||
|
[Daisy.Catch.Catch](/PierMesh/piermesh/src/branch/main/docs/Daisy/Catch.md#Daisy.Catch.Catch)
|
||||||
|
|
||||||
|
#### notConnected
|
||||||
|
|
||||||
|
Whether the transceiver has been connected to yet
|
||||||
|
|
||||||
|
* **Type:**
|
||||||
|
bool
|
||||||
|
|
||||||
|
#### acks
|
||||||
|
|
||||||
|
Acknowledgements received per packet
|
||||||
|
|
||||||
|
* **Type:**
|
||||||
|
dict
|
||||||
|
|
||||||
|
#### onodeID
|
||||||
|
|
||||||
|
PierMesh node ID
|
||||||
|
|
||||||
|
#### messages
|
||||||
|
|
||||||
|
Message completion acknowledgements
|
||||||
|
|
||||||
|
* **Type:**
|
||||||
|
dict
|
||||||
|
|
||||||
|
### Notes
|
||||||
|
|
||||||
|
TODO: Check if we can remove cpid
|
||||||
|
|
||||||
|
#### *async* addPackets(data, sender, senderName, recipient, recipientNode, directID=False, packetsClass=None, encrypt=False)
|
||||||
|
|
||||||
|
Convert binary data to Message and send each packet
|
||||||
|
:param data: Data to send
|
||||||
|
:type data: bytes
|
||||||
|
:param sender: Peer/Node ID of sender
|
||||||
|
:param senderName: ID matching specific user title
|
||||||
|
:param recipient: Peer/Node ID of recipient
|
||||||
|
:param recipientNode: Node ID of node to route to
|
||||||
|
:param directID: If set send to this Node only
|
||||||
|
:param packetsClass: Protocol for message
|
||||||
|
|
||||||
|
#### *async* announce()
|
||||||
|
|
||||||
|
Announce loop runner
|
||||||
|
|
||||||
|
#### awaitFullResponse(pid)
|
||||||
|
|
||||||
|
TODO
|
||||||
|
|
||||||
|
Wait for message completed response
|
||||||
|
|
||||||
|
#### *async* awaitResponse(pid)
|
||||||
|
|
||||||
|
Wait for acknowledgement response
|
||||||
|
|
||||||
|
#### *async* initNodeDH(dhefOb, recipientNode, onodeID)
|
||||||
|
|
||||||
|
Send Diffie Hellman initialization message
|
||||||
|
|
||||||
|
#### onConnection(interface, topic=<class 'pubsub.core.callables.AUTO_TOPIC'>)
|
||||||
|
|
||||||
|
When the node connects start announce loop and end the waiting loop
|
||||||
|
|
||||||
|
#### onReceive(packet, interface)
|
||||||
|
|
||||||
|
Run each received packet through Sponge.base.Filters sieve using a new event loop
|
||||||
|
|
||||||
|
#### *async* progressCheck()
|
||||||
|
|
||||||
|
Checks if acknowledgement was received per packet and if not resends
|
||||||
|
|
||||||
|
#### responseCheck(packet)
|
||||||
|
|
||||||
|
On acknowledgement response set acks based on response
|
||||||
|
|
||||||
|
#### send(packet, recipientNode=False)
|
||||||
|
|
||||||
|
Send individual packet
|
||||||
|
|
||||||
|
* **Parameters:**
|
||||||
|
**recipientNode** – If set send to specified node
|
||||||
|
|
||||||
|
#### *async* sendAnnounce()
|
||||||
|
|
||||||
|
Send an announce packet (contains basic network mapping information) every so often so new nodes autoconnect
|
|
@ -17,7 +17,7 @@ sphinx-quickstart on Fri Jul 26 23:30:55 2024. -->
|
||||||
* [`Node.cache`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.cache)
|
* [`Node.cache`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.cache)
|
||||||
* [`Node.nodeInfo`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.nodeInfo)
|
* [`Node.nodeInfo`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.nodeInfo)
|
||||||
* [`Node.onodeID`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.onodeID)
|
* [`Node.onodeID`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.onodeID)
|
||||||
* [`Node.oTransmitter`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.oTransmitter)
|
* [`Node.oTransceiver`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.oTransceiver)
|
||||||
* [`Node.processed`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.processed)
|
* [`Node.processed`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.processed)
|
||||||
* [`Node.proc`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.proc)
|
* [`Node.proc`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.proc)
|
||||||
* [`Node.mTasks`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.mTasks)
|
* [`Node.mTasks`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.mTasks)
|
||||||
|
@ -27,7 +27,6 @@ sphinx-quickstart on Fri Jul 26 23:30:55 2024. -->
|
||||||
* [`Node.action_sendCatch()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.action_sendCatch)
|
* [`Node.action_sendCatch()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.action_sendCatch)
|
||||||
* [`Node.action_sendToPeer()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.action_sendToPeer)
|
* [`Node.action_sendToPeer()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.action_sendToPeer)
|
||||||
* [`Node.cLog()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.cLog)
|
* [`Node.cLog()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.cLog)
|
||||||
* [`Node.monitor()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.monitor)
|
|
||||||
* [`Node.spongeListen()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.spongeListen)
|
* [`Node.spongeListen()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.spongeListen)
|
||||||
* [ui: TUI application](/PierMesh/piermesh/src/branch/main/docs/ui.md)
|
* [ui: TUI application](/PierMesh/piermesh/src/branch/main/docs/ui.md)
|
||||||
* [`TUI`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI)
|
* [`TUI`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI)
|
||||||
|
@ -109,10 +108,10 @@ sphinx-quickstart on Fri Jul 26 23:30:55 2024. -->
|
||||||
* [`Filter.protoRoute()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.protoRoute)
|
* [`Filter.protoRoute()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.protoRoute)
|
||||||
* [`Filter.selfCheck()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.selfCheck)
|
* [`Filter.selfCheck()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.selfCheck)
|
||||||
* [`Filter.sieve()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.sieve)
|
* [`Filter.sieve()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.sieve)
|
||||||
* [`filter`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.Protocols.bubble.filter)
|
* [`bubble.filter()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.Protocols.bubble.filter)
|
||||||
* [`filter`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.Protocols.catch.filter)
|
* [`catch.filter()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.Protocols.catch.filter)
|
||||||
* [`filter`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.Protocols.cryptography.filter)
|
* [`cryptography.filter()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.Protocols.cryptography.filter)
|
||||||
* [`filter`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.Protocols.map.filter)
|
* [`map.filter()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.Protocols.map.filter)
|
||||||
* [Header packet: Metadata packet](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md)
|
* [Header packet: Metadata packet](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md)
|
||||||
* [`Header`](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md#Packets.HeaderPacket.Header)
|
* [`Header`](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md#Packets.HeaderPacket.Header)
|
||||||
* [`Header.sender`](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md#Packets.HeaderPacket.Header.sender)
|
* [`Header.sender`](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md#Packets.HeaderPacket.Header.sender)
|
||||||
|
@ -134,6 +133,8 @@ sphinx-quickstart on Fri Jul 26 23:30:55 2024. -->
|
||||||
* [`Packet.dump()`](/PierMesh/piermesh/src/branch/main/docs/Packets/Packet.md#Packets.Packet.Packet.dump)
|
* [`Packet.dump()`](/PierMesh/piermesh/src/branch/main/docs/Packets/Packet.md#Packets.Packet.Packet.dump)
|
||||||
* [`Packet.parsePayload()`](/PierMesh/piermesh/src/branch/main/docs/Packets/Packet.md#Packets.Packet.Packet.parsePayload)
|
* [`Packet.parsePayload()`](/PierMesh/piermesh/src/branch/main/docs/Packets/Packet.md#Packets.Packet.Packet.parsePayload)
|
||||||
* [`SinglePacket`](/PierMesh/piermesh/src/branch/main/docs/Packets/SinglePacket.md)
|
* [`SinglePacket`](/PierMesh/piermesh/src/branch/main/docs/Packets/SinglePacket.md)
|
||||||
|
* [`SubMessage`](/PierMesh/piermesh/src/branch/main/docs/Packets/SubMessage.md)
|
||||||
|
* [`SubPacket`](/PierMesh/piermesh/src/branch/main/docs/Packets/SubPacket.md)
|
||||||
* [Layer 0 data transceiving](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md)
|
* [Layer 0 data transceiving](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md)
|
||||||
* [`Transceiver`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver)
|
* [`Transceiver`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver)
|
||||||
* [`Transceiver.cLog`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.cLog)
|
* [`Transceiver.cLog`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.cLog)
|
||||||
|
|
10
docs/run.md
|
@ -62,12 +62,12 @@ PierMesh node ID
|
||||||
* **Type:**
|
* **Type:**
|
||||||
str
|
str
|
||||||
|
|
||||||
#### oTransmitter
|
#### oTransceiver
|
||||||
|
|
||||||
LoRa transmitter Transmitter
|
LoRa transceiver Transceiver
|
||||||
|
|
||||||
* **Type:**
|
* **Type:**
|
||||||
Transmitter
|
[Transceiver](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver)
|
||||||
|
|
||||||
#### processed
|
#### processed
|
||||||
|
|
||||||
|
@ -145,10 +145,6 @@ Convenience function that logs to the ui and log files
|
||||||
* **Return type:**
|
* **Return type:**
|
||||||
None
|
None
|
||||||
|
|
||||||
#### *async* monitor()
|
|
||||||
|
|
||||||
Monitor and log RAM and CPU usage
|
|
||||||
|
|
||||||
#### *async* spongeListen()
|
#### *async* spongeListen()
|
||||||
|
|
||||||
Loop to watch for tasks to do
|
Loop to watch for tasks to do
|
||||||
|
|
|
@ -0,0 +1,293 @@
|
||||||
|
import base64
|
||||||
|
import os
|
||||||
|
from cryptography.fernet import Fernet
|
||||||
|
from cryptography.hazmat.primitives import hashes
|
||||||
|
from cryptography.hazmat.primitives.asymmetric import dh
|
||||||
|
from cryptography.hazmat.primitives.kdf.hkdf import HKDF
|
||||||
|
from cryptography.hazmat.primitives.serialization import (
|
||||||
|
Encoding,
|
||||||
|
NoEncryption,
|
||||||
|
ParameterFormat,
|
||||||
|
PublicFormat,
|
||||||
|
PrivateFormat,
|
||||||
|
)
|
||||||
|
import cryptography.hazmat.primitives.serialization as Serialization
|
||||||
|
import msgpack
|
||||||
|
from Daisy.Store import Store
|
||||||
|
|
||||||
|
# TODO: Different store directories per node
|
||||||
|
|
||||||
|
|
||||||
|
class DHEFern:
|
||||||
|
"""
|
||||||
|
|
||||||
|
Attributes
|
||||||
|
----------
|
||||||
|
cLog
|
||||||
|
Method reference to `run.Node.cLog` so we can log to the ui from here
|
||||||
|
|
||||||
|
loadedParams: dict
|
||||||
|
In memory representations of cryptography parameters
|
||||||
|
|
||||||
|
loadedKeys: dict
|
||||||
|
In memory representations of cryptography keys
|
||||||
|
|
||||||
|
nodeNickname: str
|
||||||
|
Name of node for isolating configs when running multiple nodes
|
||||||
|
|
||||||
|
cache: Components.daisy.Cache
|
||||||
|
Daisy cache for use in storing cryptography information
|
||||||
|
|
||||||
|
publicKey
|
||||||
|
Public key for node
|
||||||
|
|
||||||
|
privateKey
|
||||||
|
Private key for node
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, cache, nodeNickname, cLog):
|
||||||
|
"""
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
cache: Components.daisy.Cache
|
||||||
|
Reference to the node instances Daisy cache
|
||||||
|
|
||||||
|
nodeNickname: str
|
||||||
|
Node nickname for record storage
|
||||||
|
|
||||||
|
cLog
|
||||||
|
Reference to `run.Node.cLog`
|
||||||
|
|
||||||
|
"""
|
||||||
|
self.cLog = cLog
|
||||||
|
self.stores = {}
|
||||||
|
self.loadedParams = {}
|
||||||
|
self.loadedKeys = {}
|
||||||
|
self.nodeNickname = nodeNickname
|
||||||
|
self.cache = cache
|
||||||
|
if os.path.exists("daisy/cryptography/{0}/param".format(nodeNickname)) == False:
|
||||||
|
self.initStore("param")
|
||||||
|
else:
|
||||||
|
self.stores["param"] = Store("param", "cryptography", nodeNickname)
|
||||||
|
self.params = self.loadParamBytes(self.stores["param"].get()["self"])
|
||||||
|
self.cLog(20, "Param store initialized")
|
||||||
|
if os.path.exists("daisy/cryptography/{0}/key".format(nodeNickname)) == False:
|
||||||
|
self.cLog(20, "Key store DNE, initializing")
|
||||||
|
self.initStore("key")
|
||||||
|
self.genKeyPair()
|
||||||
|
else:
|
||||||
|
self.cLog(20, "Key store exists, loading")
|
||||||
|
self.stores["key"] = Store("key", "cryptography", nodeNickname)
|
||||||
|
self.cLog(20, "Store loaded")
|
||||||
|
# tks = self.stores["key"].get()
|
||||||
|
# self.publicKey = tks["self"]["publicKey"]
|
||||||
|
# self.privateKey = tks["self"]["privateKey"]
|
||||||
|
self.cLog(20, "Key store initialized")
|
||||||
|
|
||||||
|
def checkInMem(self, store: str, nodeID: str):
    """
    Check if parameters or keys are loaded for node of nodeID

    Parameters
    ----------
    store: str
        Whether to check loaded keys or parameters
        ("param" or "key"; any other value yields None)
    """
    # Dispatch to the matching in-memory map; membership test gives the bool.
    registry = {"param": self.loadedParams, "key": self.loadedKeys}
    if store in registry:
        return nodeID in registry[store]
|
||||||
|
|
||||||
|
def loadRecordToMem(self, store: str, nodeID: str):
|
||||||
|
"""
|
||||||
|
Load record of nodeID from store to either keys or pameters
|
||||||
|
"""
|
||||||
|
r = self.getRecord(store, nodeID)
|
||||||
|
if r == False:
|
||||||
|
self.cLog(
|
||||||
|
30, "Tried to load nonexistent {0} for node {1}".format(store, nodeID)
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
elif self.checkInMem(store, nodeID):
|
||||||
|
self.cLog(10, "{0}s already deserialized, skipping".format(store))
|
||||||
|
else:
|
||||||
|
if store == "param":
|
||||||
|
self.loadedParams[nodeID] = self.loadParamBytes(r)
|
||||||
|
elif store == "key":
|
||||||
|
self.loadedKeys[nodeID] = {
|
||||||
|
"publicKey": Serialization.load_pem_public_key(r["publicKey"]),
|
||||||
|
"privateKey": Serialization.load_pem_private_key(
|
||||||
|
r["privateKey"], None
|
||||||
|
),
|
||||||
|
}
|
||||||
|
return True
|
||||||
|
|
||||||
|
def getRecord(self, store: str, key: str):
    """
    Get record from store: store with key: key

    Returns the record, or False (after logging) when it does not exist.
    """
    # Bug fix: original referenced the bare name `stores`, which is a
    # NameError at runtime — the mapping lives on the instance as
    # self.stores (as used everywhere else in this class).
    r = self.stores[store].getRecord(key)
    if r == False:
        self.cLog(20, "Record does not exist")
        return False
    return r
|
||||||
|
|
||||||
|
def initStore(self, store: str):
    """
    Initialize store: store

    Creates a fresh Store under self.stores[store] and seeds its "self"
    record: serialized DH parameters for "param", an empty dict for "key".
    Unknown store names are logged as warnings.
    """
    newStore = Store(store, "cryptography", self.nodeNickname)
    self.stores[store] = newStore
    if store == "param":
        self.genParams()
        newStore.update("self", self.getParamsBytes(), recur=False)
    elif store == "key":
        newStore.update("self", {}, recur=False)
    else:
        self.cLog(30, "Store not defined")
|
||||||
|
|
||||||
|
def genParams(self):
    """
    Generate Diffie Hellman parameters

    Caches the freshly generated parameters on self.params and returns them.
    """
    self.params = dh.generate_parameters(generator=2, key_size=2048)
    return self.params
|
||||||
|
|
||||||
|
def getParamsBytes(self):
    """
    Get bytes encoded from self.parameters (TODO: Encode from store)

    Serializes self.params as PEM-encoded PKCS3.
    """
    pem = self.params.parameter_bytes(Encoding.PEM, ParameterFormat.PKCS3)
    return pem
|
||||||
|
|
||||||
|
def loadParamBytes(self, pemBytes: bytes):
    """
    Load parameters to self.params from given bytes (TODO: Load from store)

    Deserializes PEM bytes, caches the result on self.params, and returns it.
    """
    loaded = Serialization.load_pem_parameters(pemBytes)
    self.params = loaded
    return loaded
|
||||||
|
|
||||||
|
def genKeyPair(self, paramsOverride=False, setSelf: bool = True):
|
||||||
|
"""
|
||||||
|
Generate public and private keys from self.params (TODO: Gen from passed params)
|
||||||
|
|
||||||
|
paramsOverride
|
||||||
|
False or parameters to use (TODO)
|
||||||
|
|
||||||
|
setSelf: bool
|
||||||
|
Whether to set self.privateKey and self.publicKey
|
||||||
|
"""
|
||||||
|
privateKey = self.params.generate_private_key()
|
||||||
|
if setSelf:
|
||||||
|
self.privateKey = privateKey
|
||||||
|
publicKey = privateKey.public_key()
|
||||||
|
if setSelf:
|
||||||
|
self.publicKey = publicKey
|
||||||
|
self.stores["key"].update(
|
||||||
|
"self",
|
||||||
|
{
|
||||||
|
"publicKey": self.publicKey.public_bytes(
|
||||||
|
Encoding.PEM, PublicFormat.SubjectPublicKeyInfo
|
||||||
|
),
|
||||||
|
"privateKey": self.privateKey.private_bytes(
|
||||||
|
Encoding.PEM, PrivateFormat.PKCS8, NoEncryption()
|
||||||
|
),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
return [privateKey, publicKey]
|
||||||
|
else:
|
||||||
|
publicKey = publicKey.public_bytes(
|
||||||
|
Encoding.PEM, PublicFormat.SubjectPublicKeyInfo
|
||||||
|
)
|
||||||
|
privateKey = privateKey.private_bytes(
|
||||||
|
Encoding.PEM, PrivateFormat.PKCS8, NoEncryption()
|
||||||
|
)
|
||||||
|
return [privateKey, publicKey]
|
||||||
|
|
||||||
|
def keyDerive(self, pubKey: bytes, salt: bytes, nodeID: str, params: bytes):
    """
    Derive shared key using Diffie Hellman

    Parameters
    ----------
    pubKey: bytes
        Public key (PEM encoded) of the remote node

    salt: bytes
        Salt for the HKDF derivation

    nodeID: str
        PierMesh node ID

    params: bytes
        Encryption parameters

    Returns
    -------
    bytes
        urlsafe base64 encoded derived key (Fernet compatible)
    """
    # Make sure this node's DH parameters exist both on disk and in memory
    if self.checkInMem("param", nodeID) == False:
        if self.getRecord("param", nodeID) == False:
            self.updateStore("param", nodeID, params, recur=False)
        self.loadRecordToMem("param", nodeID)
    self.cLog(20, "Precheck done for key derivation")

    # TODO: Load them and if private key exists load it, otherwise generate a private key
    if self.checkInMem("key", nodeID) == False:
        if self.getRecord("key", nodeID) == False:
            # setSelf=False yields PEM bytes suitable for storage
            privateKey, publicKey = self.genKeyPair(setSelf=False)
            self.updateStore(
                "key", nodeID, {"publicKey": publicKey, "privateKey": privateKey}
            )
        self.loadRecordToMem("key", nodeID)

    # NOTE(review): loadedKeys appears to hold the stored PEM bytes, but
    # .exchange() is a key-object method — confirm loadRecordToMem
    # deserializes the private key before it lands here
    sharedKey = self.loadedKeys[nodeID]["privateKey"].exchange(
        Serialization.load_pem_public_key(pubKey)
    )
    # Perform key derivation.
    self.cLog(20, "Performing key derivation")
    derivedKey = HKDF(
        algorithm=hashes.SHA256(), length=32, salt=salt, info=b"handshake data"
    ).derive(sharedKey)
    self.cLog(20, "Derived key")
    # Fernet requires a urlsafe base64 encoded 32 byte key
    ederivedKey = base64.urlsafe_b64encode(derivedKey)
    tr = self.getRecord("key", nodeID)
    tr["derivedKey"] = ederivedKey
    self.updateStore("key", nodeID, tr)
    self.cLog(20, "Done with cryptography store updates")
    return ederivedKey
|
||||||
|
|
||||||
|
def getSalt(self):
    """
    Get random salt

    Returns
    -------
    bytes
        16 cryptographically secure random bytes from os.urandom
    """
    return os.urandom(16)
|
||||||
|
|
||||||
|
def encrypt(self, data, nodeID: str, isDict: bool = True):
    """
    Do Fernet encryption

    Parameters
    ----------
    data
        Either bytes or dict to encrypt

    nodeID: str
        Node whose derived key should be used

    isDict: bool
        Whether data is a dictionary (msgpack serialized before encryption)

    Returns
    -------
    Fernet token bytes, or False when no key record exists for nodeID
    """
    record = self.getRecord("key", nodeID)
    if record == False:
        self.cLog(20, "Node {0} not in keystore".format(nodeID))
        return False
    # Dicts are packed to bytes first; raw bytes pass straight through
    payload = msgpack.dumps(data) if isDict else data
    return Fernet(record["derivedKey"]).encrypt(payload)
|
||||||
|
|
||||||
|
def decrypt(self, data, nodeID: str):
    """
    Decrypt bytes and return either str or dict (TODO: Check whether to msgpack load)

    Parameters
    ----------
    data
        Fernet token bytes to decrypt

    nodeID: str
        Node whose derived key should be used

    Returns
    -------
    The msgpack decoded payload, or False when no usable key exists
    """
    record = self.getRecord("key", nodeID)
    if record == False:
        self.cLog(20, "No record of node " + nodeID)
        return False
    if "derivedKey" not in record.keys():
        self.cLog(20, "No key derived for node " + nodeID)
        return False
    # Re-fetch the record so any store-side refresh is honored
    fernet = Fernet(self.getRecord("key", nodeID)["derivedKey"])
    return msgpack.loads(fernet.decrypt(data))
|
|
@ -0,0 +1,7 @@
|
||||||
|
|
||||||
|
Diffie hellman ephemeral
|
||||||
|
Fernet based encryption
|
||||||
|
==========================
|
||||||
|
|
||||||
|
.. autoclass:: Cryptography.WhaleSong.DHEFern
|
||||||
|
:members:
|
|
@ -0,0 +1,135 @@
|
||||||
|
from Daisy.Daisy import Daisy
|
||||||
|
|
||||||
|
import os
|
||||||
|
|
||||||
|
import msgpack
|
||||||
|
|
||||||
|
from watchdog.observers import Observer
|
||||||
|
|
||||||
|
# TODO: Dumping to cacheFile
|
||||||
|
|
||||||
|
|
||||||
|
class Cache:
    """
    In memory collection of Daisy records
    """

    def __init__(
        self,
        filepaths=None,
        cacheFile=None,
        path: str = "daisy",
        walk: bool = False,
        isCatch: bool = False,
    ):
        """
        Parameters
        ----------
        filepaths
            Either a list of filepaths to load or None

        cacheFile
            Path to a cache file which is a collection of paths to load

        path: str
            Path prefix to load records from

        walk: bool
            Whether to automatically walk the path and load records

        isCatch: bool
            Whether this cache is for catches
        """
        # data maps record path -> Daisy instance
        self.data = {}
        self.path = path

        if filepaths != None:
            for fp in filepaths:
                fp = path + "/" + fp
                if os.path.isfile(fp):
                    self.data[fp] = Daisy(fp)
        elif cacheFile != None:
            # One record path per line
            with open(cacheFile, "r") as f:
                for fp in f.read().split("\n"):
                    self.data[fp] = Daisy(fp)
        elif walk:
            for root, dirs, files in os.walk(self.path):
                for p in dirs + files:
                    # Skip json sources and markdown docs; everything else
                    # under the prefix is treated as a record
                    if not (".json" in p):
                        if not (".md" in p):
                            tpath = root + "/" + p
                            self.data[tpath] = Daisy(tpath)

    def create(self, path: str, data: dict):
        """
        Create new record

        Parameters
        ----------
        path: str
            Path to create record at

        data: dict
            Data to populate record with

        Returns
        -------
        Daisy
            The newly created record
        """
        # Write the packed bytes first so the Daisy constructor below
        # loads the populated file rather than creating an empty one
        with open(self.path + "/" + path, "wb") as f:
            f.write(msgpack.dumps(data))
        # logging.log(10, "Done creating record")
        self.data[path] = Daisy(self.path + "/" + path)
        # logging.log(10, "Done loading to Daisy")
        return self.data[path]

    def get(self, path: str):
        """
        Get record at path, else return False

        Parameters
        ----------
        path: str
            Path of record

        Returns
        -------
        Daisy or False
        """
        if path in self.data.keys():
            return self.data[path]
        else:
            # Lazily load records that exist on disk but not in memory
            if os.path.exists(self.path + "/" + path):
                self.data[path] = Daisy(self.path + "/" + path)
                return self.data[path]
            else:
                # logging.log(10, "File does not exist")
                return False

    def refresh(self):
        """
        Reload from disk to memory
        """
        for key in self.data.keys():
            self.data[key].read()

    def search(self, keydict: dict, strict: bool = True):
        """
        Search cache for records matching the given values

        Parameters
        ----------
        keydict: dict
            Values to search for

        strict: bool
            Whether to require all values match

        Returns
        -------
        list
            [path, record dict] pairs for each match
        """
        results = []
        for key, val in self.data.items():
            val = val.get()
            # Directory records hold the string "directory"; skip them
            if strict and type(val) != str:
                addcheck = False
                for k, v in keydict.items():
                    if k in val.keys():
                        if v in val[k]:
                            addcheck = True
                        else:
                            addcheck = False
                            break
                    # NOTE(review): a key absent from the record neither
                    # fails nor resets addcheck, so strict mode can match
                    # on a subset of keydict — confirm intended
                if addcheck:
                    results.append([key, val])
            elif type(val) != str:
                # Loose mode: any single matching value is a hit
                for k, v in keydict.items():
                    if k in val.keys():
                        if v in val[k]:
                            results.append([key, val])
        return results
|
|
@ -0,0 +1,5 @@
|
||||||
|
Daisy based cache
|
||||||
|
==========================
|
||||||
|
|
||||||
|
.. autoclass:: Daisy.Cache.Cache
|
||||||
|
:members:
|
|
@ -0,0 +1,67 @@
|
||||||
|
from Daisy.Cache import Cache
|
||||||
|
|
||||||
|
import os
|
||||||
|
import random
|
||||||
|
|
||||||
|
|
||||||
|
class Catch(Cache):
    """
    Sub class of Cache for handling catches

    .. image:: https://git.utopic.work/PierMesh/piermesh/raw/branch/main/imgs/catchdisplay.png
    """

    # Class level registry of catches (not referenced by the visible code)
    catches = {}

    def __init__(
        self, path: str = "catch", filepaths=None, catchFile=None, walk: bool = False
    ):
        """
        Basically the same initialization parameters as Cache
        """
        super().__init__(
            filepaths=filepaths, cacheFile=catchFile, path=path, walk=walk, isCatch=True
        )

    # TODO: Fins

    def sget(self, path: str):
        """
        Call Cache's get to get record
        """
        return super().get(path)

    # TODO: Rename
    def get(self, head: str, tail: str, fins=None):
        """
        Get catch by pieces

        Parameters
        ----------
        head: str
            First part of catch (maximum: 4 characters)

        tail: str
            Second part of catch (maximum: 16 characters)

        fins
            List of (maximum 8 characters) strings at the end of the catch
            or None if none (NOTE: not used in the lookup yet)

        Returns
        -------
        The "html" field of the first matching record
        """
        r = self.search({"head": head, "tail": tail})
        # NOTE(review): r[0] raises IndexError when nothing matches —
        # confirm callers guard against missing catches
        return r[0][1]["html"]

    def addc(self, peer, node, seperator, head, tail, data, fins=None):
        """
        Add a catch record under catch/<node>/<peer>/<6 digit id>

        Parameters
        ----------
        peer
            Peer ID the catch belongs to

        node
            Node ID the catch belongs to

        seperator
            Stored on the record under the "seperator" key

        head, tail
            Catch address pieces (see get)

        data
            Dict to store; address fields are merged into it

        fins
            Optional fins list, stored when not None

        Returns
        -------
        list
            [sid, res]: generated record id and the created Daisy record
        """
        tnpath = "catch/" + node
        if os.path.exists(tnpath) != True:
            os.makedirs(tnpath)
        tppath = tnpath + "/" + peer
        if os.path.exists(tppath) != True:
            os.makedirs(tppath)
        # Zero padded random 6 digit id used as the record file name
        sid = str(random.randrange(0, 999999)).zfill(6)
        data["seperator"] = seperator
        data["head"] = head
        data["tail"] = tail
        if fins != None:
            data["fins"] = fins
        res = self.create("{0}/{1}/{2}".format(node, peer, sid), data)
        return [sid, res]
|
|
@ -0,0 +1,6 @@
|
||||||
|
Daisy cache for catches,
|
||||||
|
PierMesh's domain analog
|
||||||
|
==========================
|
||||||
|
|
||||||
|
.. autoclass:: Daisy.Catch.Catch
|
||||||
|
:members:
|
|
@ -0,0 +1,189 @@
|
||||||
|
import os
|
||||||
|
import json
|
||||||
|
import msgpack
|
||||||
|
|
||||||
|
# TODO: delete
|
||||||
|
# TODO: propagate json changes to msgpack automatically
|
||||||
|
# TODO: propagate msgpack changes to cache automatically
|
||||||
|
# TODO: Indexing
|
||||||
|
|
||||||
|
|
||||||
|
def _json_to_msg(path: str):
    """
    Convert json at the path plus .json to a msgpack binary

    Parameters
    ----------
    path: str
        Path to json minus the extension
    """
    source = path + ".json"
    # Parse the json source and repack it as msgpack bytes
    with open(source) as f:
        packed = msgpack.dumps(json.load(f))
    # Binary twin lives at the extensionless path
    with open(path, "wb") as f:
        f.write(packed)
|
||||||
|
|
||||||
|
|
||||||
|
class Daisy:
    """
    Base class for Daisy data representation

    `🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Components/daisy.py>`_

    Attributes
    ----------
    filepath: str
        Path to file representation on disk

    msg: dict
        In memory representation (the string "directory" for directories)
    """

    def __init__(
        self,
        filepath: str,
        templates: dict = {},
        template: bool = False,
        prefillDict: bool = False,
    ):
        """
        Parameters
        ----------
        filepath: str
            Path to disk location

        templates: dict
            Dictionary of templates to use

        template: bool
            Which template to use (False for none)

        prefillDict: bool
            False, or a dict of values to overlay on the chosen template
        """
        # NOTE(review): templates={} is a shared mutable default argument —
        # confirm no caller mutates it
        self.filepath = filepath
        if os.path.exists(filepath) != True:
            # New record: seed from a template when requested, else empty
            with open(filepath, "wb") as f:
                if template != False:
                    if template in templates.keys():
                        t = templates[template].get()
                        if prefillDict != False:
                            # Overlay caller supplied values on the template
                            for k in prefillDict.keys():
                                t[k] = prefillDict[k]
                        f.write(msgpack.dumps(t))
                        self.msg = t
                    else:
                        # NOTE(review): self.msg is left unset on this path
                        print("No such template as: " + template)
                else:
                    f.write(msgpack.dumps({}))
                    self.msg = {}
        elif os.path.isdir(filepath):
            # Directories get a sentinel string instead of parsed contents
            self.msg = "directory"
        else:
            with open(filepath, "rb") as f:
                self.msg = msgpack.loads(f.read())

    # Use override for updating

    def write(
        self,
        override=False,
        encrypt: bool = False,
        encryptKey=None,
        recur: bool = False,
    ):
        """
        Write record to disk

        Parameters
        ----------
        override
            Either false or a dictionary of values to set on the record

        encrypt: bool
            Whether to encrypt the record (TODO)

        encryptKey
            Key to encrypt record with, or None if not set

        recur: bool
            Whether to recursively handle keys (merge one level of
            nested dicts instead of replacing the value)
        """
        if override != False:
            for key in override.keys():
                # TODO: Deeper recursion
                if recur:
                    if not key in self.msg.keys():
                        self.msg[key] = {}
                    for ikey in override[key].keys():
                        self.msg[key][ikey] = override[key][ikey]
                else:
                    self.msg[key] = override[key]
        data = msgpack.dumps(self.msg)
        with open(self.filepath, "wb") as f:
            f.write(data)

    # Use for refreshing

    def read(self, decrypt: bool = False, decryptKey=False):
        """
        Read record from disk to memory

        Parameters
        ----------
        decrypt: bool
            Whether to decrypt record (TODO: currently unused)

        decryptKey
            Key to decrypt record (TODO: currently unused)
        """
        if os.path.isdir(self.filepath):
            self.msg = "directory"
        else:
            with open(self.filepath, "rb") as f:
                self.msg = msgpack.loads(f.read())

    def get(self):
        """
        Get record dictionary from memory

        Returns
        -------
        self.msg: dict
        """
        return self.msg

    def sublist(self):
        """
        Lists contents of directory if object is a directory, otherwise return None
        """
        fpath = self.filepath
        if os.path.isdir(fpath):
            # NOTE(review): prefix is hardcoded to "messages/" regardless of
            # where the directory actually lives — confirm intended
            return ["messages/" + x for x in os.listdir(fpath)]
        else:
            return None
|
||||||
|
|
||||||
|
|
||||||
|
def loadTemplates(templatePath: str = "templates"):
    """Load templates for prefilling records

    Walks templatePath one directory level deep and wraps each file
    in a Daisy record keyed by its path.

    Parameters
    ----------
    templatePath: str
        Path to templates

    Returns
    -------
    templates: dict
        Map of template file path to Daisy record
    """
    templates = {}
    for p in os.listdir(templatePath):
        p = templatePath + "/" + p
        if os.path.isdir(p):
            for ip in os.listdir(p):
                ip = p + "/" + ip
                if os.path.isdir(ip):
                    # Only one level of nesting is supported
                    print("Too deep, skipping: " + ip)
                else:
                    templates[ip] = Daisy(ip)
        else:
            templates[p] = Daisy(p)
    # Bug fix: original assigned `self.templates = templates` here, but this
    # is a module-level function with no `self` in scope (NameError on call)
    return templates
|
|
@ -0,0 +1,6 @@
|
||||||
|
Schemaless binary database
|
||||||
|
base class
|
||||||
|
==========================
|
||||||
|
|
||||||
|
.. autoclass:: Daisy.Daisy.Daisy
|
||||||
|
:members:
|
|
@ -0,0 +1,47 @@
|
||||||
|
from watchdog.observers import Observer
|
||||||
|
from watchdog.events import FileSystemEventHandler
|
||||||
|
|
||||||
|
global garden
|
||||||
|
"""
|
||||||
|
Map of instances to list of signals
|
||||||
|
to be processed
|
||||||
|
"""
|
||||||
|
garden = {}
|
||||||
|
|
||||||
|
|
||||||
|
class Compound(FileSystemEventHandler):
    """
    File system watcher to propagate disk changes
    """

    def __init__(self, cache, isCatch: bool = False):
        """
        Parameters
        ----------
        cache: Cache
            Daisy cache to update

        isCatch: bool
            Is the cache for catchs
        """
        self.cache = cache
        self.isCatch = isCatch
        super().__init__()

    def on_any_event(self, event):
        """
        Called when a CRUD operation is performed on a record file

        Refreshes the corresponding cache entry so memory tracks disk.

        Parameters
        ----------
        event
            Event object provided by watchdog
        """
        src = event.src_path
        # json sources and markdown docs are not records; ignore them
        if ".json" in src or ".md" in src:
            return
        # Drop the leading path component to get the cache-relative path
        relative = "/".join(src.split("/")[1:])
        if relative == "":
            return
        if self.isCatch:
            self.cache.sget(relative)
        else:
            self.cache.get(relative).get()
|
|
@ -0,0 +1,5 @@
|
||||||
|
Daisy signal management
|
||||||
|
==========================
|
||||||
|
|
||||||
|
.. autoclass:: Daisy.Soil.Compound
|
||||||
|
:members:
|
|
@ -0,0 +1,27 @@
|
||||||
|
from Daisy.Daisy import Daisy
|
||||||
|
|
||||||
|
import os
|
||||||
|
|
||||||
|
|
||||||
|
class Store(Daisy):
    """
    Daisy based key/value store scoped to a node nickname

    Records live at daisy/<path>/<nodeNickname>/<store>.
    """

    def __init__(self, store: str, path: str, nodeNickname: str):
        """
        Parameters
        ----------
        store: str
            File name of the store

        path: str
            Subdirectory under daisy/ holding this node's stores

        nodeNickname: str
            Name of node for isolating stores when running multiple nodes
        """
        fpath = "daisy/{0}/{1}".format(path, nodeNickname)
        cpath = "{0}/{1}/{2}".format(path, nodeNickname, store)
        if not os.path.exists(fpath):
            # makedirs (not mkdir) so missing intermediate directories
            # don't raise FileNotFoundError on first run
            os.makedirs(fpath)
        super().__init__("daisy/" + cpath)

    def update(self, entry: str, data, recur: bool = True):
        """
        Update the record at entry and persist to disk

        Parameters
        ----------
        entry: str
            Top level key to update

        data
            Dict of fields to merge when recur is True, otherwise the
            value to assign to entry wholesale

        recur: bool
            Whether to merge data's keys into the existing entry dict
        """
        if recur:
            # Bug fix: first write to a fresh entry used to raise KeyError
            # because self.msg[entry] did not exist yet
            if entry not in self.msg:
                self.msg[entry] = {}
            for key in data.keys():
                self.msg[entry][key] = data[key]
        else:
            self.msg[entry] = data
        self.write()

    def getRecord(self, key: str):
        """
        Return the value stored at key, or False when absent

        Parameters
        ----------
        key: str
            Top level key to look up
        """
        if key in self.get().keys():
            return self.get()[key]
        else:
            # Bug fix: Store defines no cLog and the visible Daisy base
            # doesn't either, so the original raised AttributeError here.
            # Use an attached logger when present, else fall back to print.
            log = getattr(self, "cLog", None)
            if log is not None:
                log(20, "Record does not exist")
            else:
                print("Record does not exist")
            return False
|
|
@ -0,0 +1,6 @@
|
||||||
|
Daisy based key value
|
||||||
|
store with recursion
|
||||||
|
==========================
|
||||||
|
|
||||||
|
.. autoclass:: Daisy.Store.Store
|
||||||
|
:members:
|
|
@ -0,0 +1,123 @@
|
||||||
|
import Packets.Packet as p
|
||||||
|
import Packets.HeaderPacket as h
|
||||||
|
import lzma
|
||||||
|
import msgpack
|
||||||
|
import random
|
||||||
|
import math
|
||||||
|
|
||||||
|
# DO NOT CHANGE DATA SIZE UNLESS YOU KNOW WHAT YOURE DOING
|
||||||
|
|
||||||
|
|
||||||
|
class Message:
    """
    Full message which is composed of `Packets.Packet.Packet`s

    Attributes
    ----------
    packets: list[Packets.Packet.Packet]
        List of packets making up the Message
    """

    def __init__(
        self,
        bytesObject: bytes,
        sender: int,
        senderDisplayName: int,
        recipient: int,
        recipientNode: int,
        dataSize: int = 128,
        wantFullResponse: bool = False,
        packetsClass: int = 0,
    ):
        """
        Parameters
        ----------
        bytesObject: bytes
            Bytes to split into packets (or a list of packet dicts to
            rehydrate — see first branch below)

        sender: int
            6 digit (maximum) node or peer ID

        senderDisplayName: int
            3 digit (maximum) ID for mapping display names to a given user

        recipient: int
            6 digit (maximum) node or peer ID

        recipientNode: int
            6 digit (maximum) node ID to route the packet to

        dataSize: int
            Size to cut the bytesObject into per packet

        wantFullResponse: bool
            Whether to send a response when the message has completed reception (TODO: Kill all retries for associated packets when received)

        packetsClass: int
            Which protocol the packets are using
        """
        if isinstance(bytesObject, list):
            # Rehydrate from already-split packet dicts.
            # NOTE(review): bytesObject[0] seeds the Header AND is appended
            # again as a data Packet by the loop — confirm intended
            packets = [h.Header(bytesObject[0])]
            for packet in bytesObject:
                packets.append(
                    p.Packet(
                        packet["data"],
                        packetsID=packet["packetsID"],
                        packetNumber=packet["packetNumber"],
                        packetsClass=packetsClass,
                    )
                )
            self.packets = packets
        else:
            # Fresh message: compress then slice into dataSize'd chunks
            bytesObject = lzma.compress(bytesObject)
            packets = []
            self.packetsID = random.randrange(0, 999999)
            # Data packets are numbered from 1; 0 is reserved for the header
            pnum = 1
            blen = math.ceil(len(bytesObject) / dataSize)
            tb = b""
            for it in range(blen):
                if it >= (blen - 1):
                    # Last chunk takes whatever bytes remain
                    b = bytesObject[it * dataSize :]
                else:
                    b = bytesObject[it * dataSize : (it * dataSize + dataSize)]
                packets.append(
                    p.Packet(b, self.packetsID, pnum, packetsClass=packetsClass)
                )
                pnum += 1
                tb += b
            # Header travels as packet 0
            packets.insert(
                0,
                h.Header(
                    self.packetsID,
                    pnum,
                    sender,
                    senderDisplayName,
                    recipient,
                    recipientNode,
                    wantFullResponse=wantFullResponse,
                    packetsClass=packetsClass,
                ),
            )
            # Stamp every serialized packet with the total packet count
            for it in range(pnum):
                packet = msgpack.loads(packets[it].dump())
                packet["packetCount"] = pnum

                packets[it] = msgpack.dumps(packet)

            self.packets = packets

    def get(self) -> list[p.Packet]:
        """
        Get and return all packets
        """
        return self.packets

    def reassemble(self, completedMessage: dict):
        """
        Reassemble packets from a completed message in `Sponge.base`

        Returns
        -------
        The msgpack decoded payload after lzma decompression
        """
        data = b""
        # Packet 0 is the header, so data packets run 1..packetCount-1;
        # dataOrder records arrival order, index() maps it back to sequence
        for it in range(1, int(completedMessage["packetCount"])):
            data += completedMessage["data"][completedMessage["dataOrder"].index(it)]
        res = msgpack.loads(lzma.decompress(data))
        return res
|
|
@ -0,0 +1,5 @@
|
||||||
|
Full message
|
||||||
|
===============================
|
||||||
|
|
||||||
|
.. autoclass:: Packets.Message.Message
|
||||||
|
:members:
|
|
@ -0,0 +1,4 @@
|
||||||
|
class SubMessage:
|
||||||
|
"""
|
||||||
|
TODO
|
||||||
|
"""
|
|
@ -1,3 +1,6 @@
|
||||||
SubPackets for handling
|
SubPackets for handling
|
||||||
full submessages
|
full submessages
|
||||||
=======================
|
=======================
|
||||||
|
|
||||||
|
.. autoclass:: Packets.SubMessage.SubMessage
|
||||||
|
:members:
|
|
@ -0,0 +1,4 @@
|
||||||
|
class SubPacket:
|
||||||
|
"""
|
||||||
|
TODO
|
||||||
|
"""
|
|
@ -1,3 +1,6 @@
|
||||||
SubPacket for handling
|
SubPacket for handling
|
||||||
individual packets of submessages
|
individual packets of submessages
|
||||||
=================================
|
=================================
|
||||||
|
|
||||||
|
.. autoclass:: Packets.SubPacket.SubPacket
|
||||||
|
:members:
|
||||||
|
|
|
@ -0,0 +1,40 @@
|
||||||
|
# Static site build script: renders mermaid diagrams to png, copies static
# resources into build/, and renders every non-base jinja template.
from jinja2 import Environment, FileSystemLoader, select_autoescape
import os, markdown2
import json, msgpack, subprocess
import shutil
# NOTE(review): distutils is deprecated and removed in Python 3.12 —
# consider shutil.copytree(..., dirs_exist_ok=True) instead
from distutils.dir_util import copy_tree

env = Environment(loader=FileSystemLoader("templates"))

# subprocess.check_call("mmdc -i * -e png")

# TODO: Generating mmd from docstrings

# Render every mermaid source to a png for embedding in the docs
for path in os.listdir("diagrams/markdown"):
    fname = path.split(".")[0]
    try:
        subprocess.check_call(
            "mmdc -i diagrams/markdown/{0} -o res/img/diagrams/{1}.png".format(
                path, fname
            ),
            shell=True,
        )
    except Exception as e:
        # mmdc exits nonzero on empty inputs; skip those diagrams
        print("Empty file or other error")


# Stage static assets into the build output
copy_tree("diagrams/markdown", "res/diagrams")
copy_tree("res", "build/res")
shutil.copyfile("htmx-extensions/src/ws/ws.js", "build/res/js/ws.js")

tpath = "templates/"

# Render every template except the base/ layouts, preserving subfolders
for path in os.listdir(tpath):
    if ("base" in path) != True:
        for t in os.listdir(tpath + path):
            if os.path.exists("build/" + path) != True:
                os.makedirs("build/" + path)
            ipath = tpath + path + "/" + t
            template = env.get_template(path + "/" + t)
            with open("build/{0}/{1}".format(path, t), "w") as f:
                f.write(template.render())
|
|
@ -0,0 +1,13 @@
|
||||||
|
---
|
||||||
|
title: "🔵 Bubble"
|
||||||
|
---
|
||||||
|
erDiagram
|
||||||
|
"👥 Peer" |{..o| "🗄️ Server" : "🔌 WS"
|
||||||
|
"👥 Peer" |{..o| "🗄️ Server": "📄 HTTP/S"
|
||||||
|
"🗄️ Server" |o..o| "📤 Transmitter": "❔ Queries"
|
||||||
|
"📤 Transmitter" |o..o| "📻 PierMesh": "📻 send"
|
||||||
|
"📤 Transmitter" |o..o| "📻 PierMesh": "📻 onReceive"
|
||||||
|
"📤 Transmitter" |o..o| "🧽 Sieve": "📻 onReceive"
|
||||||
|
"🧽 Sieve" |o..o| "💿 Cache": "➕ Write"
|
||||||
|
"💿 Cache" |o..o| "👂 fListen": "➕ Write event"
|
||||||
|
"👂 fListen" |o..o| "🗄️ Server": "✉️ Pass message"
|
|
@ -0,0 +1,16 @@
|
||||||
|
---
|
||||||
|
title: "🐟 Catch"
|
||||||
|
---
|
||||||
|
erDiagram
|
||||||
|
"👥 Peer" |{..o| "🗄️ Server": "🔌 WS"
|
||||||
|
"👥 Peer" |{..o| "🗄️ Server": "📄 HTTP/S"
|
||||||
|
"🗄️ Server" |o..o| "🐟 Catch": "❔ Queries"
|
||||||
|
"📻 PierMesh" |o..o| "🧽 Sieve": "🧽 Filters"
|
||||||
|
"🧽 Sieve" |o..o| "👂 fListen": "👂 Listens for messages"
|
||||||
|
"👂 fListen" |o..o| "🐟 Catch": "❔ Queries"
|
||||||
|
"🐟 Catch" |o..}| "🌼 Daisy": "📄 Store references"
|
||||||
|
"🌼 Daisy" {
|
||||||
|
string filepath
|
||||||
|
dictionary msg
|
||||||
|
}
|
||||||
|
"🌼 Daisy" |o..o| "📁 File system": "📁 CRUD"
|
|
@ -0,0 +1,5 @@
|
||||||
|
---
|
||||||
|
title: "🌼 Daisy"
|
||||||
|
---
|
||||||
|
erDiagram
|
||||||
|
|
|
@ -0,0 +1,20 @@
|
||||||
|
---
|
||||||
|
title: "📻 PierMesh"
|
||||||
|
---
|
||||||
|
erDiagram
|
||||||
|
"👥 Peer" }|..|{ "🗄️Server" : "🔌 WS"
|
||||||
|
"👥 Peer" }|..|{ "🗄️Server": "📄 HTTP/S"
|
||||||
|
"🗄️Server" |o..o| "🐟 Catch": "❔ Queries"
|
||||||
|
"🗄️Server" |o..o| "💿 Cache": "❔ Queries"
|
||||||
|
"🗄️Server" |o..o| "📤 Transmitter": "❔ Queries"
|
||||||
|
"🐟 Catch" |o..o| "📤 Transmitter": "❔ Queries"
|
||||||
|
"🐟 Catch" |o..o| "👥 Peer": "🔌 WS"
|
||||||
|
"🐟 Catch" |o..o| "📤 Transmitter": "✉️ Sync packet"
|
||||||
|
"💿 Cache" |o..o| "📤 Transmitter": "✉️ Sync packet"
|
||||||
|
"👂 fListen" |o..o| "💿 Cache": "👂 Listen for completed messages"
|
||||||
|
"👂 fListen" |o..o| "👥 Peer": "🔌 WS"
|
||||||
|
"📤 Transmitter" |o..o| "🔽 onReceive": "✉️ Get packet"
|
||||||
|
"🔽 onReceive" |o..o| "🧽 Sieve": "🧽 Filter packet"
|
||||||
|
"🧽 Sieve" |o..o| "💿 Cache": "➕ Push completed messages"
|
||||||
|
"📤 Transmitter" |o..o| "📻 PierMesh": "📻 Send"
|
||||||
|
"📻 PierMesh" |o..o| "🔽 onReceive": "🔽 Receive"
|
|
@ -0,0 +1,5 @@
|
||||||
|
---
|
||||||
|
title: "📤 Transmitter"
|
||||||
|
---
|
||||||
|
erDiagram
|
||||||
|
|
After Width: | Height: | Size: 89 KiB |
|
@ -0,0 +1 @@
|
||||||
|
mmdc -i res/misc/dia.mmd -o res/misc/dia.png
|
|
@ -0,0 +1,10 @@
|
||||||
|
@font-face {
|
||||||
|
font-family: 'Ubuntu Nerd Font';
|
||||||
|
src: url('/res/fonts/UbuntuNF.eot');
|
||||||
|
src: url('/res/fonts/UbuntuNF.eot?#iefix') format('embedded-opentype'),
|
||||||
|
url('/res/fonts/UbuntuNF.woff2') format('woff2'),
|
||||||
|
url('/res/fonts/UbuntuNF.woff') format('woff');
|
||||||
|
font-weight: normal;
|
||||||
|
font-style: normal;
|
||||||
|
font-display: swap;
|
||||||
|
}
|
|
@ -0,0 +1,37 @@
|
||||||
|
:root {
|
||||||
|
--palette-text-white: #FFFFFF;
|
||||||
|
--palette-text-black: #000000;
|
||||||
|
--palette-text-three: #3A424D;
|
||||||
|
--palette-text-four: #5B8080;
|
||||||
|
|
||||||
|
--palette-one: #3A4D24;
|
||||||
|
--palette-two: #A6B08E;
|
||||||
|
--palette-three: #879B77;
|
||||||
|
--palette-four: #61805B;
|
||||||
|
}
|
||||||
|
|
||||||
|
html {
|
||||||
|
background-color: var(--palette-one);
|
||||||
|
color: var(--palette-text-white);
|
||||||
|
font-family: 'Ubuntu Nerd Font';
|
||||||
|
padding: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.plank {
|
||||||
|
padding: 10px;
|
||||||
|
background-color: var(--palette-two);
|
||||||
|
}
|
||||||
|
|
||||||
|
ul {
|
||||||
|
padding: 0;
|
||||||
|
list-style-type: none !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
li {
|
||||||
|
padding-top: 5px;
|
||||||
|
}
|
||||||
|
|
||||||
|
input[type=text],
|
||||||
|
input[type=number] {
|
||||||
|
min-width: 150px;
|
||||||
|
}
|
|
@ -0,0 +1,13 @@
|
||||||
|
---
|
||||||
|
title: "🔵 Bubble"
|
||||||
|
---
|
||||||
|
erDiagram
|
||||||
|
"👥 Peer" |{..o| "🗄️ Server" : "🔌 WS"
|
||||||
|
"👥 Peer" |{..o| "🗄️ Server": "📄 HTTP/S"
|
||||||
|
"🗄️ Server" |o..o| "📤 Transmitter": "❔ Queries"
|
||||||
|
"📤 Transmitter" |o..o| "📻 PierMesh": "📻 send"
|
||||||
|
"📤 Transmitter" |o..o| "📻 PierMesh": "📻 onReceive"
|
||||||
|
"📤 Transmitter" |o..o| "🧽 Sieve": "📻 onReceive"
|
||||||
|
"🧽 Sieve" |o..o| "💿 Cache": "➕ Write"
|
||||||
|
"💿 Cache" |o..o| "👂 fListen": "➕ Write event"
|
||||||
|
"👂 fListen" |o..o| "🗄️ Server": "✉️ Pass message"
|
|
@ -0,0 +1,16 @@
|
||||||
|
---
|
||||||
|
title: "🐟 Catch"
|
||||||
|
---
|
||||||
|
erDiagram
|
||||||
|
"👥 Peer" |{..o| "🗄️ Server": "🔌 WS"
|
||||||
|
"👥 Peer" |{..o| "🗄️ Server": "📄 HTTP/S"
|
||||||
|
"🗄️ Server" |o..o| "🐟 Catch": "❔ Queries"
|
||||||
|
"📻 PierMesh" |o..o| "🧽 Sieve": "🧽 Filters"
|
||||||
|
"🧽 Sieve" |o..o| "👂 fListen": "👂 Listens for messages"
|
||||||
|
"👂 fListen" |o..o| "🐟 Catch": "❔ Queries"
|
||||||
|
"🐟 Catch" |o..}| "🌼 Daisy": "📄 Store references"
|
||||||
|
"🌼 Daisy" {
|
||||||
|
string filepath
|
||||||
|
dictionary msg
|
||||||
|
}
|
||||||
|
"🌼 Daisy" |o..o| "📁 File system": "📁 CRUD"
|
|
@ -0,0 +1,5 @@
|
||||||
|
---
|
||||||
|
title: "🌼 Daisy"
|
||||||
|
---
|
||||||
|
erDiagram
|
||||||
|
|
|
@ -0,0 +1,20 @@
|
||||||
|
---
|
||||||
|
title: "📻 PierMesh"
|
||||||
|
---
|
||||||
|
erDiagram
|
||||||
|
"👥 Peer" }|..|{ "🗄️Server" : "🔌 WS"
|
||||||
|
"👥 Peer" }|..|{ "🗄️Server": "📄 HTTP/S"
|
||||||
|
"🗄️Server" |o..o| "🐟 Catch": "❔ Queries"
|
||||||
|
"🗄️Server" |o..o| "💿 Cache": "❔ Queries"
|
||||||
|
"🗄️Server" |o..o| "📤 Transmitter": "❔ Queries"
|
||||||
|
"🐟 Catch" |o..o| "📤 Transmitter": "❔ Queries"
|
||||||
|
"🐟 Catch" |o..o| "👥 Peer": "🔌 WS"
|
||||||
|
"🐟 Catch" |o..o| "📤 Transmitter": "✉️ Sync packet"
|
||||||
|
"💿 Cache" |o..o| "📤 Transmitter": "✉️ Sync packet"
|
||||||
|
"👂 fListen" |o..o| "💿 Cache": "👂 Listen for completed messages"
|
||||||
|
"👂 fListen" |o..o| "👥 Peer": "🔌 WS"
|
||||||
|
"📤 Transmitter" |o..o| "🔽 onReceive": "✉️ Get packet"
|
||||||
|
"🔽 onReceive" |o..o| "🧽 Sieve": "🧽 Filter packet"
|
||||||
|
"🧽 Sieve" |o..o| "💿 Cache": "➕ Push completed messages"
|
||||||
|
"📤 Transmitter" |o..o| "📻 PierMesh": "📻 Send"
|
||||||
|
"📻 PierMesh" |o..o| "🔽 onReceive": "🔽 Receive"
|
|
@ -0,0 +1,5 @@
|
||||||
|
---
|
||||||
|
title: "📤 Transmitter"
|
||||||
|
---
|
||||||
|
erDiagram
|
||||||
|
|
After Width: | Height: | Size: 606 B |
After Width: | Height: | Size: 662 B |
After Width: | Height: | Size: 41 KiB |
After Width: | Height: | Size: 38 KiB |
After Width: | Height: | Size: 2.2 KiB |
After Width: | Height: | Size: 89 KiB |
After Width: | Height: | Size: 2.5 KiB |
After Width: | Height: | Size: 2.3 KiB |
|
@ -0,0 +1 @@
|
||||||
|
npx vite build
|
|
@ -0,0 +1,815 @@
|
||||||
|
{
|
||||||
|
"name": "piermesh",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"lockfileVersion": 3,
|
||||||
|
"requires": true,
|
||||||
|
"packages": {
|
||||||
|
"": {
|
||||||
|
"name": "piermesh",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"license": "ISC",
|
||||||
|
"dependencies": {
|
||||||
|
"htmx.org": "2.0.0",
|
||||||
|
"three": "^0.166.1"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"vite": "^5.3.4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@esbuild/aix-ppc64": {
|
||||||
|
"version": "0.21.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz",
|
||||||
|
"integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==",
|
||||||
|
"cpu": [
|
||||||
|
"ppc64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"aix"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@esbuild/android-arm": {
|
||||||
|
"version": "0.21.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz",
|
||||||
|
"integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==",
|
||||||
|
"cpu": [
|
||||||
|
"arm"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"android"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@esbuild/android-arm64": {
|
||||||
|
"version": "0.21.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz",
|
||||||
|
"integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==",
|
||||||
|
"cpu": [
|
||||||
|
"arm64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"android"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@esbuild/android-x64": {
|
||||||
|
"version": "0.21.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz",
|
||||||
|
"integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==",
|
||||||
|
"cpu": [
|
||||||
|
"x64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"android"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@esbuild/darwin-arm64": {
|
||||||
|
"version": "0.21.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz",
|
||||||
|
"integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==",
|
||||||
|
"cpu": [
|
||||||
|
"arm64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"darwin"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@esbuild/darwin-x64": {
|
||||||
|
"version": "0.21.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz",
|
||||||
|
"integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==",
|
||||||
|
"cpu": [
|
||||||
|
"x64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"darwin"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@esbuild/freebsd-arm64": {
|
||||||
|
"version": "0.21.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz",
|
||||||
|
"integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==",
|
||||||
|
"cpu": [
|
||||||
|
"arm64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"freebsd"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@esbuild/freebsd-x64": {
|
||||||
|
"version": "0.21.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz",
|
||||||
|
"integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==",
|
||||||
|
"cpu": [
|
||||||
|
"x64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"freebsd"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@esbuild/linux-arm": {
|
||||||
|
"version": "0.21.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz",
|
||||||
|
"integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==",
|
||||||
|
"cpu": [
|
||||||
|
"arm"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@esbuild/linux-arm64": {
|
||||||
|
"version": "0.21.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz",
|
||||||
|
"integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==",
|
||||||
|
"cpu": [
|
||||||
|
"arm64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@esbuild/linux-ia32": {
|
||||||
|
"version": "0.21.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz",
|
||||||
|
"integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==",
|
||||||
|
"cpu": [
|
||||||
|
"ia32"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@esbuild/linux-loong64": {
|
||||||
|
"version": "0.21.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz",
|
||||||
|
"integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==",
|
||||||
|
"cpu": [
|
||||||
|
"loong64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@esbuild/linux-mips64el": {
|
||||||
|
"version": "0.21.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz",
|
||||||
|
"integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==",
|
||||||
|
"cpu": [
|
||||||
|
"mips64el"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@esbuild/linux-ppc64": {
|
||||||
|
"version": "0.21.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz",
|
||||||
|
"integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==",
|
||||||
|
"cpu": [
|
||||||
|
"ppc64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@esbuild/linux-riscv64": {
|
||||||
|
"version": "0.21.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz",
|
||||||
|
"integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==",
|
||||||
|
"cpu": [
|
||||||
|
"riscv64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@esbuild/linux-s390x": {
|
||||||
|
"version": "0.21.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz",
|
||||||
|
"integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==",
|
||||||
|
"cpu": [
|
||||||
|
"s390x"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@esbuild/linux-x64": {
|
||||||
|
"version": "0.21.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz",
|
||||||
|
"integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==",
|
||||||
|
"cpu": [
|
||||||
|
"x64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@esbuild/netbsd-x64": {
|
||||||
|
"version": "0.21.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz",
|
||||||
|
"integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==",
|
||||||
|
"cpu": [
|
||||||
|
"x64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"netbsd"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@esbuild/openbsd-x64": {
|
||||||
|
"version": "0.21.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz",
|
||||||
|
"integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==",
|
||||||
|
"cpu": [
|
||||||
|
"x64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"openbsd"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@esbuild/sunos-x64": {
|
||||||
|
"version": "0.21.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz",
|
||||||
|
"integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==",
|
||||||
|
"cpu": [
|
||||||
|
"x64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"sunos"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@esbuild/win32-arm64": {
|
||||||
|
"version": "0.21.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz",
|
||||||
|
"integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==",
|
||||||
|
"cpu": [
|
||||||
|
"arm64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"win32"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@esbuild/win32-ia32": {
|
||||||
|
"version": "0.21.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz",
|
||||||
|
"integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==",
|
||||||
|
"cpu": [
|
||||||
|
"ia32"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"win32"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@esbuild/win32-x64": {
|
||||||
|
"version": "0.21.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz",
|
||||||
|
"integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==",
|
||||||
|
"cpu": [
|
||||||
|
"x64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"win32"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@rollup/rollup-android-arm-eabi": {
|
||||||
|
"version": "4.19.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.19.0.tgz",
|
||||||
|
"integrity": "sha512-JlPfZ/C7yn5S5p0yKk7uhHTTnFlvTgLetl2VxqE518QgyM7C9bSfFTYvB/Q/ftkq0RIPY4ySxTz+/wKJ/dXC0w==",
|
||||||
|
"cpu": [
|
||||||
|
"arm"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"android"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@rollup/rollup-android-arm64": {
|
||||||
|
"version": "4.19.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.19.0.tgz",
|
||||||
|
"integrity": "sha512-RDxUSY8D1tWYfn00DDi5myxKgOk6RvWPxhmWexcICt/MEC6yEMr4HNCu1sXXYLw8iAsg0D44NuU+qNq7zVWCrw==",
|
||||||
|
"cpu": [
|
||||||
|
"arm64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"android"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@rollup/rollup-darwin-arm64": {
|
||||||
|
"version": "4.19.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.19.0.tgz",
|
||||||
|
"integrity": "sha512-emvKHL4B15x6nlNTBMtIaC9tLPRpeA5jMvRLXVbl/W9Ie7HhkrE7KQjvgS9uxgatL1HmHWDXk5TTS4IaNJxbAA==",
|
||||||
|
"cpu": [
|
||||||
|
"arm64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"darwin"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@rollup/rollup-darwin-x64": {
|
||||||
|
"version": "4.19.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.19.0.tgz",
|
||||||
|
"integrity": "sha512-fO28cWA1dC57qCd+D0rfLC4VPbh6EOJXrreBmFLWPGI9dpMlER2YwSPZzSGfq11XgcEpPukPTfEVFtw2q2nYJg==",
|
||||||
|
"cpu": [
|
||||||
|
"x64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"darwin"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@rollup/rollup-linux-arm-gnueabihf": {
|
||||||
|
"version": "4.19.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.19.0.tgz",
|
||||||
|
"integrity": "sha512-2Rn36Ubxdv32NUcfm0wB1tgKqkQuft00PtM23VqLuCUR4N5jcNWDoV5iBC9jeGdgS38WK66ElncprqgMUOyomw==",
|
||||||
|
"cpu": [
|
||||||
|
"arm"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@rollup/rollup-linux-arm-musleabihf": {
|
||||||
|
"version": "4.19.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.19.0.tgz",
|
||||||
|
"integrity": "sha512-gJuzIVdq/X1ZA2bHeCGCISe0VWqCoNT8BvkQ+BfsixXwTOndhtLUpOg0A1Fcx/+eA6ei6rMBzlOz4JzmiDw7JQ==",
|
||||||
|
"cpu": [
|
||||||
|
"arm"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@rollup/rollup-linux-arm64-gnu": {
|
||||||
|
"version": "4.19.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.19.0.tgz",
|
||||||
|
"integrity": "sha512-0EkX2HYPkSADo9cfeGFoQ7R0/wTKb7q6DdwI4Yn/ULFE1wuRRCHybxpl2goQrx4c/yzK3I8OlgtBu4xvted0ug==",
|
||||||
|
"cpu": [
|
||||||
|
"arm64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@rollup/rollup-linux-arm64-musl": {
|
||||||
|
"version": "4.19.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.19.0.tgz",
|
||||||
|
"integrity": "sha512-GlIQRj9px52ISomIOEUq/IojLZqzkvRpdP3cLgIE1wUWaiU5Takwlzpz002q0Nxxr1y2ZgxC2obWxjr13lvxNQ==",
|
||||||
|
"cpu": [
|
||||||
|
"arm64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@rollup/rollup-linux-powerpc64le-gnu": {
|
||||||
|
"version": "4.19.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.19.0.tgz",
|
||||||
|
"integrity": "sha512-N6cFJzssruDLUOKfEKeovCKiHcdwVYOT1Hs6dovDQ61+Y9n3Ek4zXvtghPPelt6U0AH4aDGnDLb83uiJMkWYzQ==",
|
||||||
|
"cpu": [
|
||||||
|
"ppc64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@rollup/rollup-linux-riscv64-gnu": {
|
||||||
|
"version": "4.19.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.19.0.tgz",
|
||||||
|
"integrity": "sha512-2DnD3mkS2uuam/alF+I7M84koGwvn3ZVD7uG+LEWpyzo/bq8+kKnus2EVCkcvh6PlNB8QPNFOz6fWd5N8o1CYg==",
|
||||||
|
"cpu": [
|
||||||
|
"riscv64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@rollup/rollup-linux-s390x-gnu": {
|
||||||
|
"version": "4.19.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.19.0.tgz",
|
||||||
|
"integrity": "sha512-D6pkaF7OpE7lzlTOFCB2m3Ngzu2ykw40Nka9WmKGUOTS3xcIieHe82slQlNq69sVB04ch73thKYIWz/Ian8DUA==",
|
||||||
|
"cpu": [
|
||||||
|
"s390x"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@rollup/rollup-linux-x64-gnu": {
|
||||||
|
"version": "4.19.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.19.0.tgz",
|
||||||
|
"integrity": "sha512-HBndjQLP8OsdJNSxpNIN0einbDmRFg9+UQeZV1eiYupIRuZsDEoeGU43NQsS34Pp166DtwQOnpcbV/zQxM+rWA==",
|
||||||
|
"cpu": [
|
||||||
|
"x64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@rollup/rollup-linux-x64-musl": {
|
||||||
|
"version": "4.19.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.19.0.tgz",
|
||||||
|
"integrity": "sha512-HxfbvfCKJe/RMYJJn0a12eiOI9OOtAUF4G6ozrFUK95BNyoJaSiBjIOHjZskTUffUrB84IPKkFG9H9nEvJGW6A==",
|
||||||
|
"cpu": [
|
||||||
|
"x64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@rollup/rollup-win32-arm64-msvc": {
|
||||||
|
"version": "4.19.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.19.0.tgz",
|
||||||
|
"integrity": "sha512-HxDMKIhmcguGTiP5TsLNolwBUK3nGGUEoV/BO9ldUBoMLBssvh4J0X8pf11i1fTV7WShWItB1bKAKjX4RQeYmg==",
|
||||||
|
"cpu": [
|
||||||
|
"arm64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"win32"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@rollup/rollup-win32-ia32-msvc": {
|
||||||
|
"version": "4.19.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.19.0.tgz",
|
||||||
|
"integrity": "sha512-xItlIAZZaiG/u0wooGzRsx11rokP4qyc/79LkAOdznGRAbOFc+SfEdfUOszG1odsHNgwippUJavag/+W/Etc6Q==",
|
||||||
|
"cpu": [
|
||||||
|
"ia32"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"win32"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@rollup/rollup-win32-x64-msvc": {
|
||||||
|
"version": "4.19.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.19.0.tgz",
|
||||||
|
"integrity": "sha512-xNo5fV5ycvCCKqiZcpB65VMR11NJB+StnxHz20jdqRAktfdfzhgjTiJ2doTDQE/7dqGaV5I7ZGqKpgph6lCIag==",
|
||||||
|
"cpu": [
|
||||||
|
"x64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"win32"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@types/estree": {
|
||||||
|
"version": "1.0.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz",
|
||||||
|
"integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"node_modules/esbuild": {
|
||||||
|
"version": "0.21.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz",
|
||||||
|
"integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==",
|
||||||
|
"dev": true,
|
||||||
|
"hasInstallScript": true,
|
||||||
|
"bin": {
|
||||||
|
"esbuild": "bin/esbuild"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
},
|
||||||
|
"optionalDependencies": {
|
||||||
|
"@esbuild/aix-ppc64": "0.21.5",
|
||||||
|
"@esbuild/android-arm": "0.21.5",
|
||||||
|
"@esbuild/android-arm64": "0.21.5",
|
||||||
|
"@esbuild/android-x64": "0.21.5",
|
||||||
|
"@esbuild/darwin-arm64": "0.21.5",
|
||||||
|
"@esbuild/darwin-x64": "0.21.5",
|
||||||
|
"@esbuild/freebsd-arm64": "0.21.5",
|
||||||
|
"@esbuild/freebsd-x64": "0.21.5",
|
||||||
|
"@esbuild/linux-arm": "0.21.5",
|
||||||
|
"@esbuild/linux-arm64": "0.21.5",
|
||||||
|
"@esbuild/linux-ia32": "0.21.5",
|
||||||
|
"@esbuild/linux-loong64": "0.21.5",
|
||||||
|
"@esbuild/linux-mips64el": "0.21.5",
|
||||||
|
"@esbuild/linux-ppc64": "0.21.5",
|
||||||
|
"@esbuild/linux-riscv64": "0.21.5",
|
||||||
|
"@esbuild/linux-s390x": "0.21.5",
|
||||||
|
"@esbuild/linux-x64": "0.21.5",
|
||||||
|
"@esbuild/netbsd-x64": "0.21.5",
|
||||||
|
"@esbuild/openbsd-x64": "0.21.5",
|
||||||
|
"@esbuild/sunos-x64": "0.21.5",
|
||||||
|
"@esbuild/win32-arm64": "0.21.5",
|
||||||
|
"@esbuild/win32-ia32": "0.21.5",
|
||||||
|
"@esbuild/win32-x64": "0.21.5"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/fsevents": {
|
||||||
|
"version": "2.3.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
|
||||||
|
"integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
|
||||||
|
"dev": true,
|
||||||
|
"hasInstallScript": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"darwin"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/htmx.org": {
|
||||||
|
"version": "2.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/htmx.org/-/htmx.org-2.0.0.tgz",
|
||||||
|
"integrity": "sha512-N0r1VjrqeCpig0mTi2/sooDZBeQlp1RBohnWQ/ufqc7ICaI0yjs04fNGhawm6+/HWhJFlcXn8MqOjWI9QGG2lQ=="
|
||||||
|
},
|
||||||
|
"node_modules/nanoid": {
|
||||||
|
"version": "3.3.7",
|
||||||
|
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz",
|
||||||
|
"integrity": "sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==",
|
||||||
|
"dev": true,
|
||||||
|
"funding": [
|
||||||
|
{
|
||||||
|
"type": "github",
|
||||||
|
"url": "https://github.com/sponsors/ai"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"bin": {
|
||||||
|
"nanoid": "bin/nanoid.cjs"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/picocolors": {
|
||||||
|
"version": "1.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz",
|
||||||
|
"integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"node_modules/postcss": {
|
||||||
|
"version": "8.4.39",
|
||||||
|
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.39.tgz",
|
||||||
|
"integrity": "sha512-0vzE+lAiG7hZl1/9I8yzKLx3aR9Xbof3fBHKunvMfOCYAtMhrsnccJY2iTURb9EZd5+pLuiNV9/c/GZJOHsgIw==",
|
||||||
|
"dev": true,
|
||||||
|
"funding": [
|
||||||
|
{
|
||||||
|
"type": "opencollective",
|
||||||
|
"url": "https://opencollective.com/postcss/"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "tidelift",
|
||||||
|
"url": "https://tidelift.com/funding/github/npm/postcss"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "github",
|
||||||
|
"url": "https://github.com/sponsors/ai"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"dependencies": {
|
||||||
|
"nanoid": "^3.3.7",
|
||||||
|
"picocolors": "^1.0.1",
|
||||||
|
"source-map-js": "^1.2.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": "^10 || ^12 || >=14"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/rollup": {
|
||||||
|
"version": "4.19.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/rollup/-/rollup-4.19.0.tgz",
|
||||||
|
"integrity": "sha512-5r7EYSQIowHsK4eTZ0Y81qpZuJz+MUuYeqmmYmRMl1nwhdmbiYqt5jwzf6u7wyOzJgYqtCRMtVRKOtHANBz7rA==",
|
||||||
|
"dev": true,
|
||||||
|
"dependencies": {
|
||||||
|
"@types/estree": "1.0.5"
|
||||||
|
},
|
||||||
|
"bin": {
|
||||||
|
"rollup": "dist/bin/rollup"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=18.0.0",
|
||||||
|
"npm": ">=8.0.0"
|
||||||
|
},
|
||||||
|
"optionalDependencies": {
|
||||||
|
"@rollup/rollup-android-arm-eabi": "4.19.0",
|
||||||
|
"@rollup/rollup-android-arm64": "4.19.0",
|
||||||
|
"@rollup/rollup-darwin-arm64": "4.19.0",
|
||||||
|
"@rollup/rollup-darwin-x64": "4.19.0",
|
||||||
|
"@rollup/rollup-linux-arm-gnueabihf": "4.19.0",
|
||||||
|
"@rollup/rollup-linux-arm-musleabihf": "4.19.0",
|
||||||
|
"@rollup/rollup-linux-arm64-gnu": "4.19.0",
|
||||||
|
"@rollup/rollup-linux-arm64-musl": "4.19.0",
|
||||||
|
"@rollup/rollup-linux-powerpc64le-gnu": "4.19.0",
|
||||||
|
"@rollup/rollup-linux-riscv64-gnu": "4.19.0",
|
||||||
|
"@rollup/rollup-linux-s390x-gnu": "4.19.0",
|
||||||
|
"@rollup/rollup-linux-x64-gnu": "4.19.0",
|
||||||
|
"@rollup/rollup-linux-x64-musl": "4.19.0",
|
||||||
|
"@rollup/rollup-win32-arm64-msvc": "4.19.0",
|
||||||
|
"@rollup/rollup-win32-ia32-msvc": "4.19.0",
|
||||||
|
"@rollup/rollup-win32-x64-msvc": "4.19.0",
|
||||||
|
"fsevents": "~2.3.2"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/source-map-js": {
|
||||||
|
"version": "1.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz",
|
||||||
|
"integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==",
|
||||||
|
"dev": true,
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.10.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/three": {
|
||||||
|
"version": "0.166.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/three/-/three-0.166.1.tgz",
|
||||||
|
"integrity": "sha512-LtuafkKHHzm61AQA1be2MAYIw1IjmhOUxhBa0prrLpEMWbV7ijvxCRHjSgHPGp2493wLBzwKV46tA9nivLEgKg=="
|
||||||
|
},
|
||||||
|
"node_modules/vite": {
|
||||||
|
"version": "5.3.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/vite/-/vite-5.3.4.tgz",
|
||||||
|
"integrity": "sha512-Cw+7zL3ZG9/NZBB8C+8QbQZmR54GwqIz+WMI4b3JgdYJvX+ny9AjJXqkGQlDXSXRP9rP0B4tbciRMOVEKulVOA==",
|
||||||
|
"dev": true,
|
||||||
|
"dependencies": {
|
||||||
|
"esbuild": "^0.21.3",
|
||||||
|
"postcss": "^8.4.39",
|
||||||
|
"rollup": "^4.13.0"
|
||||||
|
},
|
||||||
|
"bin": {
|
||||||
|
"vite": "bin/vite.js"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": "^18.0.0 || >=20.0.0"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/vitejs/vite?sponsor=1"
|
||||||
|
},
|
||||||
|
"optionalDependencies": {
|
||||||
|
"fsevents": "~2.3.3"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"@types/node": "^18.0.0 || >=20.0.0",
|
||||||
|
"less": "*",
|
||||||
|
"lightningcss": "^1.21.0",
|
||||||
|
"sass": "*",
|
||||||
|
"stylus": "*",
|
||||||
|
"sugarss": "*",
|
||||||
|
"terser": "^5.4.0"
|
||||||
|
},
|
||||||
|
"peerDependenciesMeta": {
|
||||||
|
"@types/node": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"less": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"lightningcss": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"sass": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"stylus": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"sugarss": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"terser": {
|
||||||
|
"optional": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,18 @@
|
||||||
|
{
|
||||||
|
"name": "piermesh",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"main": "index.js",
|
||||||
|
"scripts": {
|
||||||
|
"test": "echo \"Error: no test specified\" && exit 1"
|
||||||
|
},
|
||||||
|
"author": "",
|
||||||
|
"license": "ISC",
|
||||||
|
"description": "",
|
||||||
|
"dependencies": {
|
||||||
|
"htmx.org": "2.0.0",
|
||||||
|
"three": "^0.166.1"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"vite": "^5.3.4"
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,467 @@
|
||||||
|
/*
|
||||||
|
WebSockets Extension
|
||||||
|
============================
|
||||||
|
This extension adds support for WebSockets to htmx. See /www/extensions/ws.md for usage instructions.
|
||||||
|
*/
|
||||||
|
|
||||||
|
(function() {
|
||||||
|
/** @type {import("../htmx").HtmxInternalApi} */
|
||||||
|
var api
|
||||||
|
|
||||||
|
htmx.defineExtension('ws', {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* init is called once, when this extension is first registered.
|
||||||
|
* @param {import("../htmx").HtmxInternalApi} apiRef
|
||||||
|
*/
|
||||||
|
init: function(apiRef) {
|
||||||
|
// Store reference to internal API
|
||||||
|
api = apiRef
|
||||||
|
|
||||||
|
// Default function for creating new EventSource objects
|
||||||
|
if (!htmx.createWebSocket) {
|
||||||
|
htmx.createWebSocket = createWebSocket
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default setting for reconnect delay
|
||||||
|
if (!htmx.config.wsReconnectDelay) {
|
||||||
|
htmx.config.wsReconnectDelay = 'full-jitter'
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* onEvent handles all events passed to this extension.
|
||||||
|
*
|
||||||
|
* @param {string} name
|
||||||
|
* @param {Event} evt
|
||||||
|
*/
|
||||||
|
onEvent: function(name, evt) {
|
||||||
|
var parent = evt.target || evt.detail.elt
|
||||||
|
switch (name) {
|
||||||
|
// Try to close the socket when elements are removed
|
||||||
|
case 'htmx:beforeCleanupElement':
|
||||||
|
|
||||||
|
var internalData = api.getInternalData(parent)
|
||||||
|
|
||||||
|
if (internalData.webSocket) {
|
||||||
|
internalData.webSocket.close()
|
||||||
|
}
|
||||||
|
return
|
||||||
|
|
||||||
|
// Try to create websockets when elements are processed
|
||||||
|
case 'htmx:beforeProcessNode':
|
||||||
|
|
||||||
|
forEach(queryAttributeOnThisOrChildren(parent, 'ws-connect'), function(child) {
|
||||||
|
ensureWebSocket(child)
|
||||||
|
})
|
||||||
|
forEach(queryAttributeOnThisOrChildren(parent, 'ws-send'), function(child) {
|
||||||
|
ensureWebSocketSend(child)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
/**
 * splitOnWhitespace splits a trigger specification on runs of whitespace.
 * @param {string} trigger
 * @returns {string[]}
 */
function splitOnWhitespace(trigger) {
  var trimmed = trigger.trim()
  return trimmed.split(/\s+/)
}
|
||||||
|
|
||||||
|
/**
 * getLegacyWebsocketURL extracts the connect URL from the legacy "hx-ws"
 * attribute, e.g. hx-ws="connect:/endpoint". Returns undefined when the
 * attribute is absent or carries no "connect:" token.
 * @param {HTMLElement} elt
 * @returns {string|undefined}
 */
function getLegacyWebsocketURL(elt) {
  var legacyValue = api.getAttributeValue(elt, 'hx-ws')
  if (!legacyValue) {
    return
  }
  var tokens = splitOnWhitespace(legacyValue)
  for (var idx = 0; idx < tokens.length; idx++) {
    // Split only on the first ":" so URLs containing ":" stay intact.
    var pair = tokens[idx].split(/:(.+)/)
    if (pair[0] === 'connect') {
      return pair[1]
    }
  }
}
|
||||||
|
|
||||||
|
/**
 * ensureWebSocket creates a new WebSocket on the designated element, using
 * the element's "ws-connect" attribute.
 * @param {HTMLElement} socketElt
 * @returns
 */
function ensureWebSocket(socketElt) {
  // If the element containing the WebSocket connection no longer exists, then
  // do not connect/reconnect the WebSocket.
  if (!api.bodyContains(socketElt)) {
    return
  }

  // Get the source straight from the element's value
  var wssSource = api.getAttributeValue(socketElt, 'ws-connect')

  // Fall back to the legacy "hx-ws" attribute when "ws-connect" is missing or empty.
  if (wssSource == null || wssSource === '') {
    var legacySource = getLegacyWebsocketURL(socketElt)
    if (legacySource == null) {
      return
    } else {
      wssSource = legacySource
    }
  }

  // Guarantee that the wssSource value is a fully qualified URL:
  // absolute paths inherit the page's host/port and map http(s) -> ws(s).
  if (wssSource.indexOf('/') === 0) {
    var base_part = location.hostname + (location.port ? ':' + location.port : '')
    if (location.protocol === 'https:') {
      wssSource = 'wss://' + base_part + wssSource
    } else if (location.protocol === 'http:') {
      wssSource = 'ws://' + base_part + wssSource
    }
  }

  var socketWrapper = createWebsocketWrapper(socketElt, function() {
    return htmx.createWebSocket(wssSource)
  })

  socketWrapper.addEventListener('message', function(event) {
    // Stop processing (and close the socket) if the owning element left the DOM.
    if (maybeCloseWebSocketSource(socketElt)) {
      return
    }

    var response = event.data
    // Listeners may veto handling of this message by cancelling the event.
    if (!api.triggerEvent(socketElt, 'htmx:wsBeforeMessage', {
      message: response,
      socketWrapper: socketWrapper.publicInterface
    })) {
      return
    }

    // Let other registered extensions transform the raw message text first.
    api.withExtensions(socketElt, function(extension) {
      response = extension.transformResponse(response, null, socketElt)
    })

    var settleInfo = api.makeSettleInfo(socketElt)
    var fragment = api.makeFragment(response)

    // Each top-level child of the fragment is swapped out-of-band; absent an
    // explicit hx-swap-oob attribute, 'true' (swap by matching id) is used.
    if (fragment.children.length) {
      var children = Array.from(fragment.children)
      for (var i = 0; i < children.length; i++) {
        api.oobSwap(api.getAttributeValue(children[i], 'hx-swap-oob') || 'true', children[i], settleInfo)
      }
    }

    api.settleImmediately(settleInfo.tasks)
    api.triggerEvent(socketElt, 'htmx:wsAfterMessage', { message: response, socketWrapper: socketWrapper.publicInterface })
  })

  // Put the WebSocket into the HTML Element's custom data.
  api.getInternalData(socketElt).webSocket = socketWrapper
}
|
||||||
|
|
||||||
|
/**
 * @typedef {Object} WebSocketWrapper
 * @property {WebSocket} socket
 * @property {Array<{message: string, sendElt: Element}>} messageQueue
 * @property {number} retryCount
 * @property {(message: string, sendElt: Element) => void} sendImmediately sendImmediately sends message regardless of websocket connection state
 * @property {(message: string, sendElt: Element) => void} send
 * @property {(event: string, handler: Function) => void} addEventListener
 * @property {() => void} handleQueuedMessages
 * @property {() => void} init
 * @property {() => void} close
 */
/**
 * createWebsocketWrapper wraps a raw WebSocket with message queueing,
 * automatic reconnection, and listener replay so the underlying socket can
 * be transparently re-created without callers noticing.
 * @param socketElt element that owns the connection
 * @param socketFunc zero-arg factory returning a fresh WebSocket
 * @returns {WebSocketWrapper}
 */
function createWebsocketWrapper(socketElt, socketFunc) {
  var wrapper = {
    socket: null,
    messageQueue: [],
    retryCount: 0,

    /** @type {Object<string, Function[]>} */
    events: {},

    // Registers the handler on the live socket (if any) AND records it in
    // `events` so init() can re-attach it after a reconnect.
    addEventListener: function(event, handler) {
      if (this.socket) {
        this.socket.addEventListener(event, handler)
      }

      if (!this.events[event]) {
        this.events[event] = []
      }

      this.events[event].push(handler)
    },

    // Sends without checking readyState; fires the wsBeforeSend (cancellable)
    // and wsAfterSend hooks around the actual send.
    sendImmediately: function(message, sendElt) {
      if (!this.socket) {
        api.triggerErrorEvent()
      }
      if (!sendElt || api.triggerEvent(sendElt, 'htmx:wsBeforeSend', {
        message,
        socketWrapper: this.publicInterface
      })) {
        this.socket.send(message)
        sendElt && api.triggerEvent(sendElt, 'htmx:wsAfterSend', {
          message,
          socketWrapper: this.publicInterface
        })
      }
    },

    // Queues the message while the socket is not OPEN, otherwise sends now.
    send: function(message, sendElt) {
      if (this.socket.readyState !== this.socket.OPEN) {
        this.messageQueue.push({ message, sendElt })
      } else {
        this.sendImmediately(message, sendElt)
      }
    },

    // Drains queued messages in FIFO order for as long as the socket stays OPEN.
    handleQueuedMessages: function() {
      while (this.messageQueue.length > 0) {
        var queuedItem = this.messageQueue[0]
        if (this.socket.readyState === this.socket.OPEN) {
          this.sendImmediately(queuedItem.message, queuedItem.sendElt)
          this.messageQueue.shift()
        } else {
          break
        }
      }
    },

    // (Re-)creates the underlying socket and wires all lifecycle handlers.
    init: function() {
      if (this.socket && this.socket.readyState === this.socket.OPEN) {
        // Close discarded socket
        this.socket.close()
      }

      // Create a new WebSocket and event handlers
      /** @type {WebSocket} */
      var socket = socketFunc()

      // The event.type detail is added for interface conformance with the
      // other two lifecycle events (open and close) so a single handler method
      // can handle them polymorphically, if required.
      api.triggerEvent(socketElt, 'htmx:wsConnecting', { event: { type: 'connecting' } })

      this.socket = socket

      socket.onopen = function(e) {
        wrapper.retryCount = 0
        api.triggerEvent(socketElt, 'htmx:wsOpen', { event: e, socketWrapper: wrapper.publicInterface })
        wrapper.handleQueuedMessages()
      }

      socket.onclose = function(e) {
        // If socket should not be connected, stop further attempts to establish connection
        // If Abnormal Closure/Service Restart/Try Again Later, then set a timer to reconnect after a pause.
        if (!maybeCloseWebSocketSource(socketElt) && [1006, 1012, 1013].indexOf(e.code) >= 0) {
          var delay = getWebSocketReconnectDelay(wrapper.retryCount)
          setTimeout(function() {
            wrapper.retryCount += 1
            wrapper.init()
          }, delay)
        }

        // Notify client code that connection has been closed. Client code can inspect `event` field
        // to determine whether closure has been valid or abnormal
        api.triggerEvent(socketElt, 'htmx:wsClose', { event: e, socketWrapper: wrapper.publicInterface })
      }

      socket.onerror = function(e) {
        api.triggerErrorEvent(socketElt, 'htmx:wsError', { error: e, socketWrapper: wrapper })
        maybeCloseWebSocketSource(socketElt)
      }

      // Re-attach every listener previously registered via addEventListener.
      var events = this.events
      Object.keys(events).forEach(function(k) {
        events[k].forEach(function(e) {
          socket.addEventListener(k, e)
        })
      })
    },

    close: function() {
      this.socket.close()
    }
  }

  wrapper.init()

  // Only this narrow surface is exposed to user code via events.
  wrapper.publicInterface = {
    send: wrapper.send.bind(wrapper),
    sendImmediately: wrapper.sendImmediately.bind(wrapper),
    queue: wrapper.messageQueue
  }

  return wrapper
}
|
||||||
|
|
||||||
|
/**
 * ensureWebSocketSend attaches trigger handles to elements carrying the
 * "ws-send" attribute, wiring them to the closest ancestor's WebSocket.
 * @param {HTMLElement} elt
 */
function ensureWebSocketSend(elt) {
  var legacyValue = api.getAttributeValue(elt, 'hx-ws')
  var legacyConflicts = legacyValue && legacyValue !== 'send'
  if (legacyConflicts) {
    // A legacy hx-ws attribute with any other value is not a send marker.
    return
  }

  var owningSocketElt = api.getClosestMatch(elt, hasWebSocket)
  processWebSocketSend(owningSocketElt, elt)
}
|
||||||
|
|
||||||
|
/**
 * hasWebSocket reports whether a node has a webSocket instance attached.
 * @param {HTMLElement} node
 * @returns {boolean}
 */
function hasWebSocket(node) {
  var internalData = api.getInternalData(node)
  return internalData.webSocket != null
}
|
||||||
|
|
||||||
|
/**
 * processWebSocketSend adds event listeners to the <form> element so that
 * messages can be sent to the WebSocket server when the form is submitted.
 * @param {HTMLElement} socketElt element owning the WebSocket wrapper
 * @param {HTMLElement} sendElt element whose trigger fires the send
 */
function processWebSocketSend(socketElt, sendElt) {
  var nodeData = api.getInternalData(sendElt)
  var triggerSpecs = api.getTriggerSpecs(sendElt)
  triggerSpecs.forEach(function(ts) {
    api.addTriggerHandler(sendElt, ts, nodeData, function(elt, evt) {
      // Abort (and close the socket) if the owning element left the DOM.
      if (maybeCloseWebSocketSource(socketElt)) {
        return
      }

      /** @type {WebSocketWrapper} */
      var socketWrapper = api.getInternalData(socketElt).webSocket
      var headers = api.getHeaders(sendElt, api.getTarget(sendElt))
      var results = api.getInputValues(sendElt, 'post')
      var errors = results.errors
      var rawParameters = Object.assign({}, results.values)
      var expressionVars = api.getExpressionVars(sendElt)
      var allParameters = api.mergeObjects(rawParameters, expressionVars)
      var filteredParameters = api.filterValues(allParameters, sendElt)

      // Exposed to the htmx:wsConfigSend hook, which may mutate it
      // (e.g. set messageBody) or cancel the send entirely.
      var sendConfig = {
        parameters: filteredParameters,
        unfilteredParameters: allParameters,
        headers,
        errors,

        triggeringEvent: evt,
        messageBody: undefined,
        socketWrapper: socketWrapper.publicInterface
      }

      if (!api.triggerEvent(elt, 'htmx:wsConfigSend', sendConfig)) {
        return
      }

      // Validation errors halt the send.
      if (errors && errors.length > 0) {
        api.triggerEvent(elt, 'htmx:validation:halted', errors)
        return
      }

      var body = sendConfig.messageBody
      if (body === undefined) {
        // Default body: JSON of the filtered parameters, plus HEADERS so the
        // server can inspect HX-Trigger and friends.
        var toSend = Object.assign({}, sendConfig.parameters)
        if (sendConfig.headers) { toSend.HEADERS = headers }
        body = JSON.stringify(toSend)
      }

      socketWrapper.send(body, elt)

      // Prevent the default action (e.g. form submission) when htmx says
      // this event should be cancelled.
      if (evt && api.shouldCancel(evt, elt)) {
        evt.preventDefault()
      }
    })
  })
}
|
||||||
|
|
||||||
|
/**
 * getWebSocketReconnectDelay is the default easing function for WebSocket
 * reconnects: exponential backoff capped at 2^6 seconds with full jitter.
 * @param {number} retryCount number of retries that have already taken place
 * @returns {number} delay in milliseconds (undefined on misconfiguration)
 */
function getWebSocketReconnectDelay(retryCount) {
  /** @type {"full-jitter" | ((retryCount:number) => number)} */
  var strategy = htmx.config.wsReconnectDelay
  if (typeof strategy === 'function') {
    return strategy(retryCount)
  }
  if (strategy === 'full-jitter') {
    var cappedExponent = Math.min(retryCount, 6)
    var ceilingMs = 1000 * Math.pow(2, cappedExponent)
    return Math.random() * ceilingMs
  }

  logError('htmx.config.wsReconnectDelay must either be a function or the string "full-jitter"')
}
|
||||||
|
|
||||||
|
/**
 * maybeCloseWebSocketSource checks if the element that created the WebSocket
 * still exists in the DOM. If NOT, the WebSocket is closed and this function
 * returns TRUE. If the element DOES exist, no action is taken and this
 * function returns FALSE.
 *
 * @param {*} elt
 * @returns {boolean}
 */
function maybeCloseWebSocketSource(elt) {
  if (api.bodyContains(elt)) {
    return false
  }
  api.getInternalData(elt).webSocket.close()
  return true
}
|
||||||
|
|
||||||
|
/**
 * createWebSocket is the default factory for new WebSocket objects. It is
 * hoisted into htmx.createWebSocket so users may override it if needed.
 *
 * @param {string} url
 * @returns WebSocket
 */
function createWebSocket(url) {
  var socket = new WebSocket(url, [])
  // Honor the configured binary framing (e.g. 'blob' or 'arraybuffer').
  socket.binaryType = htmx.config.wsBinaryType
  return socket
}
|
||||||
|
|
||||||
|
/**
 * queryAttributeOnThisOrChildren returns all nodes that carry the requested
 * attribute (or the legacy hx-ws attribute), INCLUDING THE PROVIDED ROOT
 * ELEMENT.
 *
 * @param {HTMLElement} elt
 * @param {string} attributeName
 * @returns {HTMLElement[]}
 */
function queryAttributeOnThisOrChildren(elt, attributeName) {
  var matches = []

  // The root element itself may carry the attribute.
  if (api.hasAttribute(elt, attributeName) || api.hasAttribute(elt, 'hx-ws')) {
    matches.push(elt)
  }

  // Then collect every descendant that matches (data- prefixed forms too).
  var selector = '[' + attributeName + '], [data-' + attributeName + '], [data-hx-ws], [hx-ws]'
  elt.querySelectorAll(selector).forEach(function(node) {
    matches.push(node)
  })

  return matches
}
|
||||||
|
|
||||||
|
/**
 * forEach applies func to each element of arr; a null/undefined arr is a no-op.
 * @template T
 * @param {T[]} arr
 * @param {(T) => void} func
 */
function forEach(arr, func) {
  if (!arr) {
    return
  }
  for (var idx = 0; idx < arr.length; idx++) {
    func(arr[idx])
  }
}
|
||||||
|
})()
|
|
@ -0,0 +1,175 @@
|
||||||
|
from microdot import Microdot
|
||||||
|
from microdot import send_file
|
||||||
|
from microdot.websocket import with_websocket
|
||||||
|
from microdot import Request
|
||||||
|
|
||||||
|
import random
|
||||||
|
import json
|
||||||
|
import time
|
||||||
|
import msgpack
|
||||||
|
|
||||||
|
# Enable 500 kB files in the webui.
# NOTE: these are byte-count limits and should be ints; the previous
# `1024 * 1024 * 0.5` produced a float (524288.0).
_HALF_MEGABYTE = 512 * 1024  # 500 kB in bytes

Request.max_content_length = _HALF_MEGABYTE
Request.max_body_length = _HALF_MEGABYTE
Request.max_readline = 1024 * 1024
|
||||||
|
|
||||||
|
|
||||||
|
class Server:
    """
    Web server that serves the web ui and provides web to node communication

    Attributes
    ----------
    cLog
        Reference to `run.Node.cLog` for logging

    transceiver: Transmission.transmission.Transmitter
        Reference to our `Transmission.transmission.Transmitter` instance

    network: Siph.Network.Network
        Reference to our `Siph.Network.Network`

    nodeID: str
        String converted PierMesh node ID

    peerIDs: dict
        Map of peer IDs to Websocket sessions

    app
        Microdot server instance

    catch: Daisy.Catch.Catch
        Reference to our Catch Cache instance to pull from for serving Catches
    """

    def __init__(self, transceiver, catch, onodeID, network, cLog):
        self.cLog = cLog
        self.transceiver = transceiver
        self.network = network
        # Register this node's mesh hardware number so peers can route to it
        # by PierMesh node id.
        self.network.addLookup(onodeID, self.transceiver.interface.localNode.nodeNum)
        self.nodeID = str(onodeID)
        self.peerIDs = {}
        self.app = Microdot()
        self.catch = catch
        # self.nmap = {self.nodeID: self.t.interface.localNode.nodeNum}
        # self.cLog(20, "Initialized server")

        @self.app.route("/res/<path:path>")
        async def static(request, path):
            """
            Static resources endpoint
            """
            if ".." in path:
                # directory traversal is not allowed
                return "Not found", 404
            return send_file("webui/build/res/" + path, max_age=86400)

        @self.app.route("/bubble")
        @with_websocket
        async def bubble(request, ws):
            """
            Websocket handler that bridges HTMX to our transmitter

            Notes
            -----
            `🔗 HTMX docs <https://htmx.org/docs/>`_
            """
            while True:
                r = await ws.receive()
                message = json.loads(r)
                # HTMX includes the triggering element's id in HEADERS.
                trigger = message["HEADERS"]["HX-Trigger"]
                # TODO: Drop old id from cache on regen
                if trigger == "gpID":
                    # Issue a random zero-padded six-digit peer id and push it
                    # (plus this node's id) into the page via OOB swaps.
                    peerID = str(random.randrange(0, 1000000)).zfill(6)
                    await ws.send(
                        """
                        <p id="vpeerID">Peer ID: {0}</p>
                        """.format(
                            peerID
                        )
                    )
                    await ws.send(
                        """
                        <input id="peerID" type="hidden" value="{0}" >
                        """.format(
                            peerID
                        )
                    )
                    await ws.send(
                        """
                        <p id="vnodeID">Node ID: {0}</p>
                        """.format(
                            self.nodeID
                        )
                    )
                    await ws.send(
                        """ <input id="nodeID" type="hidden" value="{0}" >""".format(
                            self.nodeID
                        )
                    )
                    await ws.send(
                        "<input id='gID' type='hidden' value='{0}' hx-swap-oob='true'>".format(
                            peerID
                        )
                    )
                    await ws.send(
                        "<input type='hidden' name='eID' value='{0}' hx-swap-oob='true'>".format(
                            peerID
                        )
                    )
                    peer = {"usedLast": round(time.time() * 1000), "ws": ws}
                    self.peerIDs[peerID] = peer
                elif trigger == "bubble":
                    sender = message["bID"]
                    data = message["chat_message"]
                    # TODO: Setting sender name id
                    # senderName = message["senderID"]
                    senderName = 0
                    recipient = message["recipientID"]
                    recipientNode = message["recipientNode"]
                    # BUG FIX: this previously called self.t.addPackets, but
                    # __init__ never assigns `t` — the transmitter lives on
                    # self.transceiver, so every bubble message raised
                    # AttributeError.
                    await self.transceiver.addPackets(
                        msgpack.dumps({"data": data}),
                        sender,
                        senderName,
                        recipient,
                        int(recipientNode),
                        directID=self.network.doLookup(recipientNode),
                        packetsClass=2,
                    )
                elif trigger == "catch":
                    res = self.catch.get(message["head"], message["body"])
                    await ws.send('<div id="catchDisplay">{0}</div>'.format(res))
                # TODO: Catch update packets
                elif trigger == "catchEdit":
                    self.catch.addc(
                        message["eID"],
                        self.nodeID,
                        message["sep"],
                        message["head"],
                        message["body"],
                        {"html": message["catchContent"]},
                    )
                    await ws.send(
                        """
                        <ul id="resultsCatch" hx-swap-oob='true'><li>OK</li></ul>
                        """
                    )
                else:
                    # Unknown trigger: placeholder acknowledgement.
                    await ws.send(
                        """<div id="chat_room" hx-swap-oob="beforeend">hi</div>"""
                    )

        @self.app.route("/")
        async def index(request):
            """
            Static handler to serve the web ui
            """
            return send_file("webui/build/index/index.html")

    async def sendToPeer(self, peerID: str, data: str):
        """
        Send data to Websocket of peer with peerID
        """
        await self.peerIDs[peerID]["ws"].send(
            "<ul id='chat_room' hx-swap-oob='afterend'><li>{0}</li></ul>".format(data)
        )
|
|
@ -0,0 +1,5 @@
|
||||||
|
serve: Web UI server
|
||||||
|
============================
|
||||||
|
|
||||||
|
.. autoclass:: Splash.serve.Server
|
||||||
|
:members:
|
|
@ -0,0 +1,22 @@
|
||||||
|
{% extends "shared/base.html" %}
|
||||||
|
{% block body %}
|
||||||
|
|
||||||
|
<img alt="PierMesh logo" height="128px" src="/res/img/logo.png">
|
||||||
|
<br>
|
||||||
|
<br>
|
||||||
|
{% include "shared/catch.nav.html" %}
|
||||||
|
<br>
|
||||||
|
{% include "shared/catch.editor.html" %}
|
||||||
|
<div hx-history="false">
|
||||||
|
</div>
|
||||||
|
<br>
|
||||||
|
<div class="plank" hx-ext="ws" ws-connect="/bubble">
|
||||||
|
<p id="vpeerID">Peer ID:</p>
|
||||||
|
<input id="peerID" type="hidden" >
|
||||||
|
<p id="vnodeID">Node ID:</p>
|
||||||
|
<input id="nodeID" type="hidden" >
|
||||||
|
<button id="gpID" ws-send>Connect</button>
|
||||||
|
</div>
|
||||||
|
<br>
|
||||||
|
{% include "shared/messenger.html" %}
|
||||||
|
{% endblock %}
|
|
@ -0,0 +1,21 @@
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html>
|
||||||
|
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8">
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||||
|
<title>{{ title }}</title>
|
||||||
|
<link rel="stylesheet" type="text/css" href="/res/css/fonts.css">
|
||||||
|
<link rel="stylesheet" type="text/css" href="/res/css/style.css">
|
||||||
|
<script src="/res/js/node_modules/htmx.org/dist/htmx.min.js"></script>
|
||||||
|
<script src="/res/js/ws.js">
|
||||||
|
</script>
|
||||||
|
</head>
|
||||||
|
|
||||||
|
<body>
|
||||||
|
{% include "shared/nav.html" %}
|
||||||
|
{% block body %}
|
||||||
|
{% endblock %}
|
||||||
|
</body>
|
||||||
|
|
||||||
|
</html>
|
|
@ -0,0 +1,23 @@
|
||||||
|
<div class="plank" hx-ext="ws" ws-connect="/bubble">
|
||||||
|
<img src="/res/img/catchdisplay.png">
|
||||||
|
<br>
|
||||||
|
Catch publisher<br>
|
||||||
|
<ul id="resultsCatch">
|
||||||
|
</ul>
|
||||||
|
<form id="catchEdit" ws-send>
|
||||||
|
Head <br> <input type="text" name="head" size="4" maxlength="4"><br>
|
||||||
|
Separator <br> <input type="text" name="sep" size="1" maxlength="1"><br>
|
||||||
|
Body <br> <input type="text" name="body" size="16" maxlength="16"><br>
|
||||||
|
Fins<br>
|
||||||
|
<ul id="fins">
|
||||||
|
<li class="fin"> <input type="text" size="8" maxlength="8"> </li>
|
||||||
|
<li><button>+</button></li>
|
||||||
|
</ul>
|
||||||
|
Content
|
||||||
|
<br>
|
||||||
|
<textarea style="min-width: 200px;min-height:200px;" name="catchContent"></textarea>
|
||||||
|
<br>
|
||||||
|
<button onclick="document.getElementById('eID').value = document.getElementById('peerID').value">Publish</button>
|
||||||
|
<input type="hidden" name="eID" id="eID">
|
||||||
|
</form>
|
||||||
|
</div>
|
|
@ -0,0 +1 @@
|
||||||
|
<div style="background-color: var(--palette-three);" id="catchDisplay"></div>
|
|
@ -0,0 +1,31 @@
|
||||||
|
<div class="plank" hx-ext="ws" ws-connect="/bubble">
|
||||||
|
<img src="/res/img/catchdisplay.png">
|
||||||
|
<br>
|
||||||
|
Catch<br><br>
|
||||||
|
<form id="catch" ws-send>
|
||||||
|
<label for="head">Head (max. 4 characters)</label>
|
||||||
|
<br>
|
||||||
|
<input type="text" id="head" name="head" size="4" maxlength="4">
|
||||||
|
<br>
|
||||||
|
<label for="sep">Separator</label>
|
||||||
|
<br>
|
||||||
|
<input type="text" id="sep" name="sep" size="1" maxlength="1"><br>
|
||||||
|
<label for="body">Body (max. 16 characters)</label>
|
||||||
|
<br>
|
||||||
|
<input type="text" id="body" name="body" size="16" maxlength="16">
|
||||||
|
<ul id="fins">
|
||||||
|
Fins:
|
||||||
|
<li class="fin">
|
||||||
|
<input type="text" size="8" maxlength="8">
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
<button>+</button>
|
||||||
|
</li>
|
||||||
|
</ul>
|
||||||
|
<button>Get</button>
|
||||||
|
</form>
|
||||||
|
Results:
|
||||||
|
<br>
|
||||||
|
{% include "shared/catch.html" %}
|
||||||
|
<br>
|
||||||
|
</div>
|
|
@ -0,0 +1,20 @@
|
||||||
|
<div class="plank" hx-ext="ws" ws-connect="/bubble">
|
||||||
|
<img src="/res/img/bubbledisplay.png">
|
||||||
|
<br>
|
||||||
|
Bubble
|
||||||
|
<br>
|
||||||
|
<br>
|
||||||
|
Responses: <ul id="chat_room" hx-swap="afterend">
|
||||||
|
</ul>
|
||||||
|
<br>
|
||||||
|
<form id="bubble" ws-send>
|
||||||
|
Peer ID:<br>
|
||||||
|
<input name="recipientID" id="recipientID" type="number" max="999999"><br>
|
||||||
|
Node ID:<br>
|
||||||
|
<input name="recipientNode" id="recipientNode" type="number" max="999999"><br>
|
||||||
|
Data<br> <textarea style="min-width: 200px;min-height: 200px;" type="textarea" name="chat_message"></textarea>
|
||||||
|
<br>
|
||||||
|
<input type="hidden" name="bID" id="bID">
|
||||||
|
<button onclick="document.getElementById('bID').value = document.getElementById('peerID').value">Send</button>
|
||||||
|
</form>
|
||||||
|
</div>
|
|
@ -0,0 +1 @@
|
||||||
|
|
|
@ -1,6 +1,8 @@
|
||||||
async def filter(completeMessage, recipient, recipientNode, onodeID, todo):
|
async def filter(completeMessage, recipient, recipientNode, onodeID, todo):
|
||||||
"""
|
"""
|
||||||
Peer to peer protocol
|
Peer to peer protocol
|
||||||
|
|
||||||
|
`🔗 Bubble Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/src/Sponge/Protocols/bubble.py>`_
|
||||||
"""
|
"""
|
||||||
m = completeMessage
|
m = completeMessage
|
||||||
if recipientNode == onodeID:
|
if recipientNode == onodeID:
|
||||||
|
|
|
@ -1,6 +1,8 @@
|
||||||
async def filter(completeMessage, recipient, recipientNode, todo):
|
async def filter(completeMessage, recipient, recipientNode, todo):
|
||||||
"""
|
"""
|
||||||
Catch exchange protocol
|
Catch exchange protocol
|
||||||
|
|
||||||
|
`🔗 Catch Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/catch.py>`_
|
||||||
"""
|
"""
|
||||||
m = completeMessage
|
m = completeMessage
|
||||||
# TODO: Sending to other nodes clients
|
# TODO: Sending to other nodes clients
|
||||||
|
|
|
@ -1,9 +1,8 @@
|
||||||
import logging
|
|
||||||
|
|
||||||
|
|
||||||
async def filter(completeMessage, recipientNode, todo):
|
async def filter(completeMessage, recipientNode, todo):
|
||||||
"""
|
"""
|
||||||
Cryptographic operations protocol
|
Cryptographic operations protocol
|
||||||
|
|
||||||
|
`🔗 Cryptography Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/cryptography.py>`_
|
||||||
"""
|
"""
|
||||||
todo.append(
|
todo.append(
|
||||||
{
|
{
|
||||||
|
|
|
@ -1,6 +1,8 @@
|
||||||
async def filter(completeMessage, todo):
|
async def filter(completeMessage, todo):
|
||||||
"""
|
"""
|
||||||
Network mapping protocol
|
Network mapping protocol
|
||||||
|
|
||||||
|
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/Protocols/map.py>`_
|
||||||
"""
|
"""
|
||||||
m = completeMessage
|
m = completeMessage
|
||||||
todo.append(
|
todo.append(
|
||||||
|
|
|
@ -12,6 +12,8 @@ class Filter:
|
||||||
"""
|
"""
|
||||||
Packet filtering orchestration
|
Packet filtering orchestration
|
||||||
|
|
||||||
|
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Sponge/base.py>`_
|
||||||
|
|
||||||
cLog
|
cLog
|
||||||
Reference to `run.Node.cLog` for logging
|
Reference to `run.Node.cLog` for logging
|
||||||
|
|
||||||
|
@ -138,10 +140,10 @@ class Filter:
|
||||||
# responseLoop(packets_id)
|
# responseLoop(packets_id)
|
||||||
pass
|
pass
|
||||||
completeMessage = self.messages[packetsID]
|
completeMessage = self.messages[packetsID]
|
||||||
completeMessage["data"] = Packets.reassemble(None, completeMessage)
|
completeMessage["data"] = Message.reassemble(None, completeMessage)
|
||||||
del self.messages[packetsID]
|
del self.messages[packetsID]
|
||||||
self.completed.append(packetsID)
|
self.completed.append(packetsID)
|
||||||
self.cLog(20, "Assembly completed, routing")
|
self.cLog(20, "Assembly completed, routing")
|
||||||
await self.protoRoute(completeMessage)
|
await self.protoRoute(completeMessage)
|
||||||
except Exception as e:
|
except Exception:
|
||||||
self.cLog(30, traceback.print_exc())
|
self.cLog(30, traceback.format_exc())
|
||||||
|
|
|
@ -5,14 +5,11 @@ Dispatches to Protocols
|
||||||
.. autoclass:: Sponge.base.Filter
|
.. autoclass:: Sponge.base.Filter
|
||||||
:members:
|
:members:
|
||||||
|
|
||||||
.. autoclass:: Sponge.Protocols.bubble.filter
|
.. automethod:: Sponge.Protocols.bubble.filter
|
||||||
:members:
|
|
||||||
|
|
||||||
.. autoclass:: Sponge.Protocols.catch.filter
|
.. automethod:: Sponge.Protocols.catch.filter
|
||||||
:members:
|
|
||||||
|
|
||||||
.. autoclass:: Sponge.Protocols.cryptography.filter
|
.. automethod:: Sponge.Protocols.cryptography.filter
|
||||||
:members:
|
|
||||||
|
.. automethod:: Sponge.Protocols.map.filter
|
||||||
|
|
||||||
.. autoclass:: Sponge.Protocols.map.filter
|
|
||||||
:members:
|
|
||||||
|
|
|
@ -0,0 +1,302 @@
|
||||||
|
import meshtastic
|
||||||
|
import meshtastic.serial_interface
|
||||||
|
from pubsub import pub
|
||||||
|
from Packets.Message import Message
|
||||||
|
import time
|
||||||
|
|
||||||
|
import msgpack
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
|
||||||
|
|
||||||
|
class Transceiver:
|
||||||
|
"""
|
||||||
|
Handling LoRa transceiving
|
||||||
|
|
||||||
|
Attributes
|
||||||
|
----------
|
||||||
|
cLog
|
||||||
|
Reference to `run.Node.cLog` for logging
|
||||||
|
|
||||||
|
cryptographyInfo: Cryptography.WhaleSong.DHEFern
|
||||||
|
Cryptography instance for encrypting transmissions
|
||||||
|
|
||||||
|
filter: Sponge.base.Filter
|
||||||
|
`Sponge.base.Filter` instance for filtering packets
|
||||||
|
|
||||||
|
tcache: Daisy.Cache.Cache
|
||||||
|
Data backend Daisy Cache
|
||||||
|
|
||||||
|
tcatch: Daisy.Catch.Catch
|
||||||
|
Daisy Catch Cache for Catch operations
|
||||||
|
|
||||||
|
notConnected: bool
|
||||||
|
Whether the transceiver has been connected to yet
|
||||||
|
|
||||||
|
acks: dict
|
||||||
|
Acknowledgements received per packet
|
||||||
|
|
||||||
|
onodeID
|
||||||
|
PierMesh node ID
|
||||||
|
|
||||||
|
messages: dict
|
||||||
|
Message completion acknowledgements
|
||||||
|
|
||||||
|
Notes
|
||||||
|
-----
|
||||||
|
TODO: Check if we can remove cpid
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, device, filter, onodeID, cache, catch, cryptographyInfo, cLog):
|
||||||
|
self.cLog = cLog
|
||||||
|
self.cryptographyInfo = cryptographyInfo
|
||||||
|
self.filter = filter
|
||||||
|
self.tcache = cache
|
||||||
|
self.tcatch = catch
|
||||||
|
self.notConnected = True
|
||||||
|
self.messages = {}
|
||||||
|
self.acks = {}
|
||||||
|
self.onodeID = onodeID
|
||||||
|
# Be careful with this
|
||||||
|
self.cpid = 0
|
||||||
|
self.tasks = {}
|
||||||
|
# TODO: use node id to deliver directly
|
||||||
|
pub.subscribe(self.onReceive, "meshtastic.receive")
|
||||||
|
pub.subscribe(self.onConnection, "meshtastic.connection.established")
|
||||||
|
self.interface = meshtastic.serial_interface.SerialInterface(device)
|
||||||
|
i = 0
|
||||||
|
while self.notConnected:
|
||||||
|
if i % 5000000 == 0:
|
||||||
|
self.cLog(20, "Waiting for node initialization...")
|
||||||
|
i += 1
|
||||||
|
self.cLog(20, "Initialized")
|
||||||
|
|
||||||
|
# TODO: Sending packets across multiple nodes/load balancing/distributed packet transmission/reception
|
||||||
|
def onReceive(self, packet, interface):
    """
    Run each received packet through Sponge.base.Filter's sieve using a
    fresh event loop, then refresh the data cache.

    Parameters
    ----------
    packet
        Raw packet delivered by meshtastic

    interface
        meshtastic interface the packet arrived on (unused, but required
        by the pubsub callback signature)
    """
    # Close the loop when done: the original never closed it, leaking one
    # event loop (and its selector fd) per received packet
    loop = asyncio.new_event_loop()
    try:
        loop.run_until_complete(self.filter.sieve(packet))
    finally:
        loop.close()
    self.tcache.refresh()
|
||||||
|
|
||||||
|
async def sendAnnounce(self):
    """
    Broadcast an announce packet (contains basic network mapping
    information) so new nodes autoconnect.
    """
    # Map our PierMesh node ID to the meshtastic hardware node number
    payload = msgpack.dumps(
        {
            "onodeID": self.onodeID,
            "mnodeID": self.interface.localNode.nodeNum,
        }
    )
    await self.addPackets(
        payload,
        self.onodeID,
        None,
        True,
        None,
        packetsClass=0,
    )
|
||||||
|
|
||||||
|
def onConnection(self, interface, topic=pub.AUTO_TOPIC):
    """
    When the node connects start announce loop and end the waiting loop

    Parameters
    ----------
    interface
        meshtastic interface that connected (required by the pubsub
        callback signature)

    topic
        pubsub topic, defaults to pub.AUTO_TOPIC
    """
    # Fire one announce immediately on connection
    asyncio.run(self.sendAnnounce())
    # Unblocks the wait loop in __init__
    self.notConnected = False
|
||||||
|
|
||||||
|
def responseCheck(self, packet):
    """
    Record acknowledgement state for a packet based on its routing
    response: False on MAX_RETRANSMIT, True otherwise.

    Parameters
    ----------
    packet
        Response packet containing the request id and routing error reason
    """
    decoded = packet["decoded"]
    requestKey = str(decoded["requestId"])
    failed = decoded["routing"]["errorReason"] == "MAX_RETRANSMIT"
    if failed:
        self.cLog(20, "Got ack error")
    self.acks[requestKey] = not failed
|
||||||
|
|
||||||
|
# TODO: Threaded send method
|
||||||
|
|
||||||
|
def send(self, packet, recipientNode=False):
    """
    Send an individual packet over the mesh.

    Parameters
    ----------
    packet
        Binary packet payload to transmit

    recipientNode
        If set send to specified node, otherwise broadcast

    Returns
    -------
    bool
        Always True once the packet has been handed to the interface
    """
    iface = self.interface
    if recipientNode == False:
        # No recipient: broadcast send
        result = iface.sendData(packet, wantAck=True, onResponse=self.responseCheck)
    else:
        # Directed send to a single node
        result = iface.sendData(
            packet,
            destinationId=recipientNode,
            wantAck=True,
            onResponse=self.responseCheck,
        )
    # Track the in-flight packet id
    # Can I use waitForAckNak on cpid?
    self.cpid = result.id
    return True
|
||||||
|
|
||||||
|
async def awaitResponse(self, pid):
    """
    Poll once per second (up to 120 seconds) until an acknowledgement for
    the given packet id arrives.

    Parameters
    ----------
    pid
        Packet id to wait on

    Returns
    -------
    bool
        Always True, whether or not an ack arrived before timing out
    """
    ackKey = str(pid)
    for _ in range(120):
        await asyncio.sleep(1)
        if ackKey in self.acks:
            break
    return True
|
||||||
|
|
||||||
|
async def initNodeDH(self, dhefOb, recipientNode, onodeID):
    """
    Send a Diffie Hellman initialization message to a node.

    Parameters
    ----------
    dhefOb: Cryptography.WhaleSong.DHEFern
        Cryptography object supplying the DH parameters and public key

    recipientNode
        Node to deliver the initialization message to directly

    onodeID
        PierMesh node ID of the recipient
    """
    payload = msgpack.dumps(
        {"params": dhefOb.getParamsBytes(), "publicKey": dhefOb.publicKey}
    )
    await self.addPackets(
        payload,
        self.onodeID,
        000000,
        000000,
        onodeID,
        directID=recipientNode,
        packetsClass=3,
    )
|
||||||
|
|
||||||
|
def awaitFullResponse(self, pid):
    """
    TODO

    Wait for message completed response

    Blocks synchronously, polling every 5 seconds, until the message with
    the given id is marked finished in self.messages.

    Parameters
    ----------
    pid
        Message id to wait on

    Returns
    -------
    bool
        Always True once the message is marked finished
    """
    # NOTE(review): 1e9 iterations at 5s each is effectively an unbounded
    # wait — presumably intended as "forever"; consider a real timeout
    for i in range(1_000_000_000):
        time.sleep(5)
        if pid in self.messages.keys():
            if self.messages[pid]["finished"]:
                break
    return True
|
||||||
|
|
||||||
|
async def addPackets(
    self,
    data,
    sender,
    senderName,
    recipient,
    recipientNode,
    directID=False,
    packetsClass=None,
    encrypt=False,
):
    """
    Convert binary data to Message and send each packet

    Parameters
    ----------
    data: bytes
        Data to send

    sender
        Peer/Node ID of sender

    senderName
        ID matching specific user title

    recipient
        Peer/Node ID of recipient

    recipientNode
        Node ID of node to route to

    directID
        If set send to this Node only

    packetsClass
        Protocol for message

    encrypt: bool
        Whether to encrypt the message (NOTE(review): accepted but unused
        in this method — confirm intended behavior)
    """
    # Split the payload into a sequence of mesh-sized packets
    tp = Message(
        data,
        sender,
        senderName,
        recipient,
        recipientNode,
        packetsClass=packetsClass,
    )
    for p in tp.packets:
        if recipientNode == None:
            # No route known: broadcast
            self.send(p)
        else:
            self.cLog(10, "Sending target: " + str(directID))
            # directID overrides the routed recipient when provided
            if directID != False:
                recipientNode = directID
            self.send(p, recipientNode=recipientNode)
        # Track the ack wait so progressCheck can retry unacked packets;
        # self.cpid was just set by send()
        awaitTask = asyncio.create_task(self.awaitResponse(self.cpid))
        # Yield so the task gets a chance to start before being recorded
        await asyncio.sleep(1)
        currentTask = {
            "ob": awaitTask,
            "pid": str(self.cpid),
            "packet": p,
            "retry": False,
        }
        self.tasks[str(self.cpid)] = currentTask
|
||||||
|
|
||||||
|
async def progressCheck(self):
    """
    Background loop: every 90 seconds check whether an acknowledgement
    was received for each tracked packet and resend (up to 3 retries)
    if not.
    """
    while True:
        await asyncio.sleep(90)
        self.cLog(
            20, "Checking progress of {0} tasks".format(len(self.tasks.keys()))
        )
        # NOTE(review): doneFlag is set below but never read afterwards
        doneFlag = True
        # Snapshot the keys so self.tasks can be mutated while iterating
        dcTasks = [k for k in self.tasks.keys()]
        for task in dcTasks:
            task = self.tasks[task]
            if task["ob"]:
                if task["pid"] in self.acks:
                    if not self.acks[task["pid"]]:
                        # Nak / timeout: bump the retry counter
                        retry = task["retry"]
                        remove = False
                        if retry == False:
                            retry = 1
                        elif retry < 3:
                            retry += 1
                        else:
                            self.cLog(30, "Too many retries")
                            remove = True
                        if remove:
                            del self.tasks[task["pid"]]
                        else:
                            self.cLog(20, "Doing retry")
                            doneFlag = False
                            # TODO: Resend to specific node
                            self.send(task["packet"])
                            await_thread = asyncio.create_task(
                                self.awaitResponse(task["pid"])
                            )
                            # Yield so the new wait task gets started
                            await asyncio.sleep(1)
                            # NOTE(review): pid recorded is the NEW
                            # self.cpid from the resend, but the entry is
                            # stored under the OLD pid key — confirm
                            currentTask = {
                                "ob": await_thread,
                                "pid": str(self.cpid),
                                "packet": task["packet"],
                            }
                            currentTask["retry"] = retry
                            self.tasks[task["pid"]] = currentTask
                    else:
                        # Acked successfully: stop tracking
                        del self.tasks[task["pid"]]
|
||||||
|
|
||||||
|
async def announce(self):
    """
    Announce loop runner: broadcast an announce packet every 180 seconds.
    """
    while True:
        self.cLog(10, "Announce")
        await self.sendAnnounce()
        await asyncio.sleep(180)
|
|
@ -0,0 +1,5 @@
|
||||||
|
Layer 0 data transceiving
|
||||||
|
===============================
|
||||||
|
|
||||||
|
.. autoclass:: Transceiver.Transceiver.Transceiver
|
||||||
|
:members:
|
|
@ -23,6 +23,7 @@ extensions = [
|
||||||
"sphinx_markdown_builder",
|
"sphinx_markdown_builder",
|
||||||
"sphinx.ext.autodoc",
|
"sphinx.ext.autodoc",
|
||||||
"sphinx.ext.napoleon",
|
"sphinx.ext.napoleon",
|
||||||
|
"sphinx.ext.viewcode",
|
||||||
]
|
]
|
||||||
|
|
||||||
templates_path = ["_templates"]
|
templates_path = ["_templates"]
|
||||||
|
|
20
src/run.py
|
@ -150,9 +150,6 @@ class Node:
|
||||||
"""
|
"""
|
||||||
while True:
|
while True:
|
||||||
if tuiOb != None:
|
if tuiOb != None:
|
||||||
if tuiOb.done:
|
|
||||||
print("Terminating PierMesh service...")
|
|
||||||
self.proc.terminate()
|
|
||||||
await asyncio.sleep(10)
|
await asyncio.sleep(10)
|
||||||
memmb = self.proc.memory_info().rss / (1024 * 1024)
|
memmb = self.proc.memory_info().rss / (1024 * 1024)
|
||||||
memmb = round(memmb, 2)
|
memmb = round(memmb, 2)
|
||||||
|
@ -270,7 +267,9 @@ async def logPassLoop():
|
||||||
await asyncio.sleep(1)
|
await asyncio.sleep(1)
|
||||||
if tuiOb == None or nodeOb == None:
|
if tuiOb == None or nodeOb == None:
|
||||||
await asyncio.sleep(1)
|
await asyncio.sleep(1)
|
||||||
elif tuiOb.done == True:
|
elif tuiOb.done:
|
||||||
|
tuiOb.exit()
|
||||||
|
os.system("reset")
|
||||||
print("Terminating PierMesh service...")
|
print("Terminating PierMesh service...")
|
||||||
nodeOb.proc.terminate()
|
nodeOb.proc.terminate()
|
||||||
else:
|
else:
|
||||||
|
@ -315,10 +314,8 @@ async def main():
|
||||||
nodeOb.mTasks["announce"] = asyncio.create_task(nodeOb.oTransceiver.announce())
|
nodeOb.mTasks["announce"] = asyncio.create_task(nodeOb.oTransceiver.announce())
|
||||||
await asyncio.sleep(1)
|
await asyncio.sleep(1)
|
||||||
await nodeOb.server.app.start_server(port=int(sys.argv[2]), debug=True)
|
await nodeOb.server.app.start_server(port=int(sys.argv[2]), debug=True)
|
||||||
except KeyboardInterrupt:
|
|
||||||
sys.exit()
|
|
||||||
except Exception:
|
except Exception:
|
||||||
nodeOb.cLog(20, traceback.format_exc())
|
logging.log(20, traceback.format_exc())
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
@ -330,7 +327,8 @@ if __name__ == "__main__":
|
||||||
tuiOb = TUI()
|
tuiOb = TUI()
|
||||||
tuiOb.nodeOb = nodeOb
|
tuiOb.nodeOb = nodeOb
|
||||||
tuiOb.run()
|
tuiOb.run()
|
||||||
except KeyboardInterrupt:
|
except:
|
||||||
nodeOb.cLog(30, "Terminating PierMesh service...")
|
try:
|
||||||
except Exception:
|
nodeOb.cLog(30, traceback.format_exc())
|
||||||
nodeOb.cLog(30, sys.gettrace())
|
except:
|
||||||
|
logging.log(30, traceback.format_exc())
|
||||||
|
|
|
@ -0,0 +1,454 @@
|
||||||
|
import json
import logging
import os
import random

import msgpack
from watchdog.events import FileSystemEventHandler
from watchdog.observers import Observer
|
||||||
|
|
||||||
|
# TODO: delete
|
||||||
|
# TODO: propagate json changes to msgpack automatically
|
||||||
|
# TODO: propagate msgpack changes to cache automatically
|
||||||
|
# TODO: Indexing
|
||||||
|
|
||||||
|
|
||||||
|
def _json_to_msg(path: str):
    """
    Convert the json file at path + ".json" to a msgpack binary at path.

    Parameters
    ----------
    path: str
        Path to json minus the extension
    """
    sourcePath = path + ".json"
    with open(sourcePath) as jsonFile:
        packed = msgpack.dumps(json.load(jsonFile))
    with open(path, "wb") as outFile:
        outFile.write(packed)
|
||||||
|
|
||||||
|
|
||||||
|
class Daisy:
    """
    Base class for Daisy data representation

    `🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Components/daisy.py>`_

    Attributes
    ----------
    filepath: str
        Path to file representation on disk

    msg: dict
        In memory representation (the string "directory" for directories)
    """

    def __init__(
        self,
        filepath: str,
        templates: dict = None,
        template: bool = False,
        prefillDict: bool = False,
    ):
        """
        Parameters
        ----------
        filepath: str
            Path to disk location

        templates: dict
            Dictionary of templates to use (None means no templates;
            was a mutable default dict, which is shared across calls)

        template: bool
            Which template to use, or False for none

        prefillDict: bool
            Either False or a dict of values to prefill the record with
        """
        # Fix: avoid the shared mutable-default-argument pitfall
        if templates is None:
            templates = {}
        self.filepath = filepath
        if not os.path.exists(filepath):
            with open(filepath, "wb") as f:
                t = {}
                if template != False:
                    if template in templates.keys():
                        t = templates[template].get()
                        if prefillDict != False:
                            for k in prefillDict.keys():
                                t[k] = prefillDict[k]
                    else:
                        # Fix: fall back to an empty record instead of
                        # leaving self.msg unset and the file empty, which
                        # made a later read() crash on msgpack.loads(b"")
                        print("No such template as: " + str(template))
                f.write(msgpack.dumps(t))
                self.msg = t
        elif os.path.isdir(filepath):
            self.msg = "directory"
        else:
            with open(filepath, "rb") as f:
                self.msg = msgpack.loads(f.read())

    # Use override for updating

    def write(
        self,
        override=False,
        encrypt: bool = False,
        encryptKey=None,
        recur: bool = False,
    ):
        """
        Write record to disk

        Parameters
        ----------
        override
            Either False or a dictionary of values to set on the record

        encrypt: bool
            Whether to encrypt the record (TODO)

        encryptKey
            Key to encrypt record with, or None if not set

        recur: bool
            Whether to merge one level of nested dict keys instead of
            replacing the top-level value
        """
        if override != False:
            for key in override.keys():
                # TODO: Deeper recursion
                if recur:
                    if key not in self.msg.keys():
                        self.msg[key] = {}
                    for ikey in override[key].keys():
                        self.msg[key][ikey] = override[key][ikey]
                else:
                    self.msg[key] = override[key]
        data = msgpack.dumps(self.msg)
        with open(self.filepath, "wb") as f:
            f.write(data)

    # Use for refreshing

    def read(self, decrypt: bool = False, decryptKey=False):
        """
        Read record from disk to memory

        Parameters
        ----------
        decrypt: bool
            Whether to decrypt record (NOTE: currently unused)

        decryptKey
            Key to decrypt record (NOTE: currently unused)
        """
        if os.path.isdir(self.filepath):
            self.msg = "directory"
        else:
            with open(self.filepath, "rb") as f:
                self.msg = msgpack.loads(f.read())

    def get(self):
        """
        Get record dictionary from memory

        Returns
        -------
        self.msg: dict
        """
        return self.msg

    def sublist(self):
        """
        Lists contents of directory if object is a directory, otherwise
        return None
        """
        fpath = self.filepath
        if os.path.isdir(fpath):
            return ["messages/" + x for x in os.listdir(fpath)]
        else:
            return None
|
||||||
|
|
||||||
|
|
||||||
|
def loadTemplates(templatePath: str = "templates"):
    """Load templates for prefilling records

    Parameters
    ----------
    templatePath: str
        Path to templates

    Returns
    -------
    templates: dict
        Mapping of template file path -> Daisy record
    """
    templates = {}
    for p in os.listdir(templatePath):
        p = templatePath + "/" + p
        if os.path.isdir(p):
            # Only one level of template subdirectories is supported
            for ip in os.listdir(p):
                ip = p + "/" + ip
                if os.path.isdir(ip):
                    print("Too deep, skipping: " + ip)
                else:
                    templates[ip] = Daisy(ip)
        else:
            templates[p] = Daisy(p)
    # Fix: removed `self.templates = templates` — this is a module-level
    # function with no self, so that line raised NameError on every call
    return templates
|
||||||
|
|
||||||
|
|
||||||
|
class CFSHandler(FileSystemEventHandler):
    """
    File system watchdog that propagates disk changes to records to their
    proper cache
    """

    def __init__(self, cache, isCatch: bool = False):
        """
        Parameters
        ----------
        cache: Cache
            Daisy cache to update

        isCatch: bool
            Is the cache for catchs
        """
        self.cache = cache
        self.isCatch = isCatch
        super().__init__()

    def on_any_event(self, event):
        """
        Called when a CRUD operation is performed on a record file

        Parameters
        ----------
        event
            Event object provided by watchdog
        """
        srcPath = event.src_path
        # Skip json sources and markdown docs; only record files count
        if (".json" in srcPath) or (".md" in srcPath):
            return
        # Drop the leading path component to get the record key
        recordPath = "/".join(srcPath.split("/")[1:])
        if recordPath == "":
            return
        if self.isCatch:
            self.cache.sget(recordPath)
        else:
            self.cache.get(recordPath).get()
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: Dumping to cacheFile
|
||||||
|
|
||||||
|
|
||||||
|
class Cache:
    """
    In memory collection of Daisy records
    """

    def __init__(
        self,
        filepaths=None,
        cacheFile=None,
        path: str = "daisy",
        walk: bool = False,
        isCatch: bool = False,
    ):
        """
        Parameters
        ----------
        filepaths
            Either a list of filepaths to load or None

        cacheFile
            Path to a cache file which is a collection of paths to load

        path: str
            Path prefix to load records from

        walk: bool
            Whether to automatically walk the path and load records

        isCatch: bool
            Whether this cache is for catchs
        """
        # Records keyed by filepath
        self.data = {}
        self.path = path
        # Watchdog observer keeps this cache in sync with on-disk changes
        self.event_handler = CFSHandler(self, isCatch=isCatch)
        self.observer = Observer()
        self.observer.schedule(self.event_handler, self.path, recursive=True)
        self.observer.start()
        # TODO: Test
        # Exactly one load strategy runs: explicit list, cache file, or walk
        if filepaths != None:
            for fp in filepaths:
                fp = path + "/" + fp
                if os.path.isfile(fp):
                    self.data[fp] = Daisy(fp)
        elif cacheFile != None:
            with open(cacheFile, "r") as f:
                for fp in f.read().split("\n"):
                    self.data[fp] = Daisy(fp)
        elif walk:
            for root, dirs, files in os.walk(self.path):
                for p in dirs + files:
                    # print("walking")
                    # Skip json sources and markdown docs
                    if not (".json" in p):
                        if not (".md" in p):
                            tpath = root + "/" + p
                            # print(p)
                            # print(tpath)
                            self.data[tpath] = Daisy(tpath)

    def create(self, path: str, data: dict):
        """
        Create new record

        Parameters
        ----------
        path: str
            Path (relative to self.path) to create record at

        data: dict
            Data to populate record with

        Returns
        -------
        Daisy
            The newly created record
        """
        with open(self.path + "/" + path, "wb") as f:
            f.write(msgpack.dumps(data))
        logging.log(10, "Done creating record")
        self.data[path] = Daisy(self.path + "/" + path)
        logging.log(10, "Done loading to Daisy")
        return self.data[path]

    def get(self, path: str):
        """
        Get record at path, else return False

        Parameters
        ----------
        path: str
            Path of record relative to self.path
        """
        if path in self.data.keys():
            return self.data[path]
        else:
            # Not in memory: try loading it from disk on demand
            if os.path.exists(self.path + "/" + path):
                self.data[path] = Daisy(self.path + "/" + path)
                return self.data[path]
            else:
                logging.log(10, "File does not exist")
                return False

    def refresh(self):
        """
        Reload from disk to memory
        """
        for key in self.data.keys():
            self.data[key].read()

    def search(self, keydict: dict, strict: bool = True):
        """
        Search cache for records with matching values

        Parameters
        ----------
        keydict: dict
            Values to search for

        strict: bool
            Whether to require all keys to match (any single match
            suffices when False)

        Returns
        -------
        list
            [path, record dict] pairs for matching records
        """
        results = []
        for key, val in self.data.items():
            val = val.get()
            # Directory records hold the string "directory"; skip those
            if strict and type(val) != str:
                addcheck = False
                for k, v in keydict.items():
                    if k in val.keys():
                        if v in val[k]:
                            addcheck = True
                        else:
                            addcheck = False
                            break
                if addcheck:
                    results.append([key, val])
            elif type(val) != str:
                for k, v in keydict.items():
                    if k in val.keys():
                        if v in val[k]:
                            results.append([key, val])
        return results
|
||||||
|
|
||||||
|
|
||||||
|
class Catch(Cache):
    """
    Sub class of Cache for handling catchs

    .. image:: https://git.utopic.work/PierMesh/piermesh/raw/branch/main/imgs/catchdisplay.png
    """

    # NOTE(review): class-level dict shared across all Catch instances;
    # unused in this file — confirm before relying on it
    catches = {}

    def __init__(
        self, path: str = "catch", filepaths=None, catchFile=None, walk: bool = False
    ):
        """
        Basically the same initialization parameters as Cache
        """
        super().__init__(
            filepaths=filepaths, cacheFile=catchFile, path=path, walk=walk, isCatch=True
        )

    # TODO: Fins

    def sget(self, path: str):
        """
        Call Cache's get to get record by literal path
        """
        return super().get(path)

    def get(self, head: str, tail: str, fins=None):
        """
        Get catch by pieces

        Parameters
        ----------
        head: str
            First part of catch (maximum: 4 characters)

        tail: str
            Second part of catch (maximum: 16 characters)

        fins
            List of (maximum 8 characters) strings at the end of the catch
            or None if none (NOTE(review): currently ignored by the search)
        """
        r = self.search({"head": head, "tail": tail})
        # NOTE(review): raises IndexError when no record matches — confirm
        # callers guard against empty results
        return r[0][1]["html"]

    def addc(self, peer, node, seperator, head, tail, data, fins=None):
        """
        Add a catch record under catch/<node>/<peer>/<random 6 digit id>.

        Parameters
        ----------
        peer
            Peer ID the catch belongs to

        node
            Node ID the catch belongs to

        seperator
            Separator stored on the record

        head
            First part of the catch

        tail
            Second part of the catch

        data: dict
            Record contents; seperator/head/tail/fins are merged in

        fins
            Optional fins list stored on the record

        Returns
        -------
        list
            [sid, record] where sid is the generated 6 digit id
        """
        tnpath = "catch/" + node
        if os.path.exists(tnpath) != True:
            os.makedirs(tnpath)
        tppath = tnpath + "/" + peer
        if os.path.exists(tppath) != True:
            os.makedirs(tppath)
        # Random zero-padded 6 digit record id
        sid = str(random.randrange(0, 999999)).zfill(6)
        data["seperator"] = seperator
        data["head"] = head
        data["tail"] = tail
        if fins != None:
            data["fins"] = fins
        res = self.create("{0}/{1}/{2}".format(node, peer, sid), data)
        return [sid, res]
|
||||||
|
|
||||||
|
|
||||||
|
class Store(Daisy):
    """
    Key/value store backed by a Daisy record on disk at
    daisy/<path>/<nodeNickname>/<store>.
    """

    def __init__(self, store: str, path: str, nodeNickname: str):
        """
        Parameters
        ----------
        store: str
            Name of the store file

        path: str
            Path prefix (under daisy/) to keep the store in

        nodeNickname: str
            Name of node for isolating stores when running multiple nodes
        """
        # Fix: original line ended in a stray ")):" — a syntax error
        fpath = "daisy/{0}/{1}".format(path, nodeNickname)
        cpath = "{0}/{1}/{2}".format(path, nodeNickname, store)
        if not os.path.exists(fpath):
            # Fix: makedirs so "daisy/<path>" is created too; os.mkdir
            # failed when the intermediate directory was missing
            os.makedirs(fpath)
        super().__init__("daisy/" + cpath)

    def update(self, entry: str, data, recur: bool = True):
        """
        Update an entry in the store and persist it to disk.

        Parameters
        ----------
        entry: str
            Key of the entry to update

        data
            Dict of values to merge into the entry (recur=True) or the
            value to assign directly (recur=False)

        recur: bool
            Whether to merge data's keys into the existing entry dict
        """
        if recur:
            for key in data.keys():
                self.msg[entry][key] = data[key]
        else:
            self.msg[entry] = data
        self.write()

    def getRecord(self, key: str):
        """
        Get a value from the store, or False if it does not exist.

        Parameters
        ----------
        key: str
            Key of the entry to fetch
        """
        if key in self.get().keys():
            return self.get()[key]
        else:
            # Fix: original called self.cLog, which Store/Daisy never
            # define, raising AttributeError; log via logging instead
            logging.log(20, "Record does not exist")
            return False
|
||||||
|
|