Merge pull request 'First push' (#72) from nightly into main
Reviewed-on: #72
|
@ -1,92 +0,0 @@
|
|||
![Daisy logo](https://git.utopic.work/PierMesh/piermesh/raw/branch/main/imgs/daisydisplay.png)
|
||||
|
||||
# Schemaless binary database
|
||||
|
||||
### *class* Components.daisy.Daisy(filepath: str, templates: dict = {}, template: bool = False, prefillDict: bool = False)
|
||||
|
||||
Base class for Daisy data representation
|
||||
|
||||
[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/Components/daisy.py)
|
||||
|
||||
#### get()
|
||||
|
||||
Get record dictionary from memory
|
||||
|
||||
* **Returns:**
|
||||
**self.msg**
|
||||
* **Return type:**
|
||||
dict
|
||||
|
||||
#### read(decrypt: bool = False, decryptKey=False)
|
||||
|
||||
Read record from disk to memory
|
||||
|
||||
* **Parameters:**
|
||||
* **decrypt** (*bool*) – Whether to decrypt record
|
||||
* **decryptKey** – Key to decrypt record
|
||||
|
||||
#### sublist()
|
||||
|
||||
List the contents of the directory if this record is a directory, otherwise return None
|
||||
|
||||
#### write(override=False, encrypt: bool = False, encryptKey=None, recur: bool = False)
|
||||
|
||||
Write record to disk
|
||||
|
||||
* **Parameters:**
|
||||
* **override** – Either False or a dictionary of values to set on the record
|
||||
* **encrypt** (*bool*) – Whether to encrypt the record (TODO)
|
||||
* **encryptKey** – Key to encrypt record with, or None if not set
|
||||
* **recur** (*bool*) – Whether to recursively handle keys
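
A minimal usage sketch of the record lifecycle described above. The record path and field names are made up for illustration, and the parent directory is assumed to already exist:

```python
from Components.daisy import Daisy

# Opening a path creates the msgpack file on disk if it does not exist yet
record = Daisy("daisy/example-record")

# Set top-level keys, then merge values into a nested dict with recur=True
record.write(override={"status": "ok", "ttl": 30})
record.write(override={"meta": {"owner": 123456}}, recur=True)

record.read()        # reload from disk into memory
print(record.get())  # -> {'status': 'ok', 'ttl': 30, 'meta': {'owner': 123456}}
```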
|
||||
|
||||
### *class* Components.daisy.Cache(filepaths=None, cacheFile=None, path: str = 'daisy', walk: bool = False, isCatch: bool = False)
|
||||
|
||||
In memory collection of Daisy records
|
||||
|
||||
#### create(path: str, data: dict)
|
||||
|
||||
Create new record
|
||||
|
||||
* **Parameters:**
|
||||
* **path** (*str*) – Path to create record at
|
||||
* **data** (*dict*) – Data to populate record with
|
||||
|
||||
#### get(path: str)
|
||||
|
||||
Get record at path, else return False
|
||||
|
||||
path: str
|
||||
: Path of record
|
||||
|
||||
#### refresh()
|
||||
|
||||
Reload from disk to memory
|
||||
|
||||
#### search(keydict: dict, strict: bool = True)
|
||||
|
||||
Search the cache for records with matching values
|
||||
|
||||
keydict: dict
|
||||
: Values to search for
|
||||
|
||||
strict: bool
|
||||
: Whether all searched values must match (otherwise any single match is enough)
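
A short sketch of the cache workflow. The record paths and field values are hypothetical, and the directories under the cache path are assumed to exist:

```python
from Components.daisy import Cache

cache = Cache(path="daisy")   # watches the "daisy" directory for on-disk changes
cache.create("peers/000001", {"nickname": "alice", "online": True})

# get() loads the record from disk on first access and returns False if it is missing
peer = cache.get("peers/000001")
if peer:
    print(peer.get()["nickname"])

# search() returns [path, record] pairs whose fields contain the searched values
matches = cache.search({"nickname": "alice"}, strict=True)
```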
|
||||
|
||||
### *class* Components.daisy.Catch(path: str = 'catch', filepaths=None, catchFile=None, walk: bool = False)
|
||||
|
||||
Subclass of Cache for handling catches
|
||||
|
||||
![image](https://git.utopic.work/PierMesh/piermesh/raw/branch/main/imgs/catchdisplay.png)
|
||||
|
||||
#### get(head: str, tail: str, fins=None)
|
||||
|
||||
Get catch by pieces
|
||||
|
||||
* **Parameters:**
|
||||
* **head** (*str*) – First part of catch (maximum: 4 characters)
|
||||
* **tail** (*str*) – Second part of catch (maximum: 16 characters)
|
||||
* **fins** – List of strings (maximum 8 characters each) at the end of the catch, or None if there are none
|
||||
|
||||
#### sget(path: str)
|
||||
|
||||
Call Cache’s get to get record
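
A sketch of retrieving a catch by its pieces; the head/tail values and record path are made up for illustration:

```python
from Components.daisy import Catch

catch = Catch(path="catch", walk=True)   # load existing catch records from disk

# get() searches loaded records by head/tail and returns the record's html field
page = catch.get("bbs", "weather")

# sget() is Cache.get by path, bypassing the head/tail lookup
record = catch.sget("000001/000002/123456")
```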
|
|
@ -1,4 +1,4 @@
|
|||
# Small internet interop utilities
|
||||
# hopper: Small internet interop utilities
|
||||
|
||||
### Components.hopper.get(url: str, params=None)
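
Roughly how the helper is meant to be used (the URL is illustrative): it performs the HTTP GET, wraps the result as a dictionary of response body and status code, msgpack-encodes and lzma-compresses it, and returns it already split into PierMesh packets:

```python
import Components.hopper as hopper

# Response body and status code come back packed into transmit-ready packets
packets = hopper.get("https://example.com/api/status", params={"q": "piermesh"})
```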
|
||||
|
||||
|
|
|
@ -1,5 +0,0 @@
|
|||
Diffie hellman ephemeral
|
||||
Fernet based encryption
|
||||
==========================
|
||||
|
||||
### *class* Cryptography.DHEFern.DHEFern(cache, nodeNickname, cLog)
|
|
@ -1,3 +1,62 @@
|
|||
# Header packet: Metadata packet
|
||||
|
||||
### *class* Packets.HeaderPacket.Header(packetsID, packetCount, sender, senderDisplayName, recipient, recipientNode, json=True, fname=False, subpacket=False, wantFullResponse=False, mimeType=-1, protocol=None, packetsClass=0)
|
||||
### *class* Packets.HeaderPacket.Header(packetsID: int, packetCount: int, sender: int, senderDisplayName: int, recipient: int, recipientNode: int, subpacket: bool = False, wantFullResponse: bool = False, packetsClass: int = 0, pAction: int = -1)
|
||||
|
||||
Metadata packet for messages
|
||||
|
||||
#### sender
|
||||
|
||||
6 digit (maximum) node or peer ID
|
||||
|
||||
* **Type:**
|
||||
int
|
||||
|
||||
#### senderDisplayName
|
||||
|
||||
3 digit (maximum) ID for mapping display names to a given user
|
||||
|
||||
* **Type:**
|
||||
int
|
||||
|
||||
#### recipient
|
||||
|
||||
6 digit (maximum) node or peer ID
|
||||
|
||||
* **Type:**
|
||||
int
|
||||
|
||||
#### recipientNode
|
||||
|
||||
6 digit (maximum) node ID to route the packet to
|
||||
|
||||
* **Type:**
|
||||
int
|
||||
|
||||
#### subpacket
|
||||
|
||||
Whether this is a subpacket
|
||||
|
||||
* **Type:**
|
||||
bool
|
||||
|
||||
#### wantFullResponse
|
||||
|
||||
Whether a response should be sent when the message completes reception (TODO)
|
||||
|
||||
* **Type:**
|
||||
bool
|
||||
|
||||
#### pAction
|
||||
|
||||
3 digit (maximum) pAction ID for mapping precise actions within a protocol (TODO)
|
||||
|
||||
* **Type:**
|
||||
int
|
||||
|
||||
#### dump()
|
||||
|
||||
Dump packet to msgpack encoded binary for transmission
|
||||
|
||||
#### usePreset(path: str)
|
||||
|
||||
Add preset fields to the packet
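
A sketch of building and serializing a header with the new keyword signature. The IDs are placeholders, and since the constructor is still being migrated, exactly which extra fields must be set before calling dump() may change:

```python
from Packets.HeaderPacket import Header

header = Header(
    123456,   # packetsID of the parent message
    4,        # packetCount
    111111,   # sender (6 digit max node or peer ID)
    1,        # senderDisplayName (3 digit max)
    222222,   # recipient
    333333,   # recipientNode
    packetsClass=0,
    pAction=1,
)

wire = header.dump()   # msgpack-encoded bytes ready for transmission
```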
|
||||
|
|
|
@ -1,3 +1,45 @@
|
|||
# Base packet
|
||||
|
||||
### *class* Packets.Packet.Packet(data, packetsID=False, packetNumber=False, packetCount=1, packetsClass=-1)
|
||||
### *class* Packets.Packet.Packet(data: bytes, packetsID: int = -1, packetNumber=False, packetCount: int = 1, packetsClass: int = -1)
|
||||
|
||||
Base class for Packets
|
||||
|
||||
#### data
|
||||
|
||||
Data part this packet is delivering (usually none when creating a header packet)
|
||||
|
||||
* **Type:**
|
||||
bytes
|
||||
|
||||
#### packetsID
|
||||
|
||||
6 digit (maximum) ID for parent Message (TODO: Change to MessageID)
|
||||
|
||||
* **Type:**
|
||||
int
|
||||
|
||||
#### packetNumber
|
||||
|
||||
Index of packet or False
|
||||
|
||||
#### packetCount
|
||||
|
||||
Number of packets in parent Message
|
||||
|
||||
* **Type:**
|
||||
int
|
||||
|
||||
#### packetsClass
|
||||
|
||||
3 digit (maximum) class/protocol for the message (TODO: Change to messageProtocol)
|
||||
|
||||
* **Type:**
|
||||
int
|
||||
|
||||
#### dump()
|
||||
|
||||
Dump packet to msgpack encoded binary data
|
||||
|
||||
#### parsePayload()
|
||||
|
||||
Parse bytes to Packet (TODO: Fix)
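
A sketch of the round trip between a Packet and its wire form; since parsePayload is flagged TODO above, treat this as the intended behavior rather than a guarantee:

```python
import lzma
from Packets.Packet import Packet

# A data packet normally carries one lzma-compressed chunk of a larger message
chunk = lzma.compress(b"hello piermesh")
pkt = Packet(chunk, packetsID=123456, packetNumber=1, packetCount=4, packetsClass=0)

wire = pkt.dump()                    # msgpack-encoded bytes
fields = Packet.parsePayload(wire)   # [packetsID, packetNumber, decompressed data, packetsClass]
```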
|
||||
|
|
|
@ -1,5 +0,0 @@
|
|||
Packets representation for full
|
||||
message
|
||||
===============================
|
||||
|
||||
### *class* Packets.Packets.Packets(bytesObject, sender, senderDisplayName, recipient, recipientNode, dataSize=128, wantFullResponse=False, packetsClass=None)
|
|
@ -3,3 +3,7 @@ for very low data applications
|
|||
===============================
|
||||
|
||||
### *class* Packets.SinglePacket.SinglePacket(data, packetsID, packetsClass=None, cache=None)
|
||||
|
||||
WIP
|
||||
|
||||
Single packet for very small protocols
|
||||
|
|
|
@ -4,18 +4,59 @@ Dispatches to Protocols
|
|||
|
||||
### *class* Sponge.base.Filter(cache, onodeID, todo, cLog)
|
||||
|
||||
Packet filtering orchestration
|
||||
|
||||
cLog
|
||||
: Reference to run.Node.cLog for logging
|
||||
|
||||
cache: Daisy.Cache.Cache
|
||||
: Reference to our Daisy Cache instance
|
||||
|
||||
completed: list
|
||||
: List of completed message IDs
|
||||
|
||||
todo
|
||||
: Reference to list of actions to do in the Node
|
||||
|
||||
onodeID
|
||||
: PierMesh node ID
|
||||
|
||||
#### cache
|
||||
|
||||
messages: temporary storage for unfinished messages
|
||||
|
||||
#### *async* protoRoute(completeMessage)
|
||||
#### mCheck(payload: bytes)
|
||||
|
||||
Shorthand reference
|
||||
Check if payload bytes are msgpack encoded, otherwise skip
|
||||
|
||||
#### *async* protoMap(protocolID: int)
|
||||
|
||||
Get protocol from protocol ID using the mlookup table
|
||||
|
||||
#### *async* protoRoute(completeMessage: dict)
|
||||
|
||||
Route message to proper protocol handler
|
||||
|
||||
#### selfCheck(packet)
|
||||
|
||||
Check if this is a self packet, if so skip
|
||||
|
||||
#### *async* sieve(packet)
|
||||
|
||||
Base filtering logic, takes a single MeshTastic packet
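
For reference, the two synchronous checks can be exercised directly. A rough sketch assuming `f` is the Filter instance the Node constructs at startup; the packet fields shown are illustrative:

```python
import msgpack

# f = Filter(cache, onodeID, todo, cLog)   # wired up by run.Node

good = msgpack.dumps({"packetsID": 123456, "packetNumber": 1})
f.mCheck(good)                 # True: payload decodes as msgpack
f.mCheck(b"\x00not-msgpack")   # False: decode fails, packet is skipped

f.selfCheck({"fromId": 111111, "toId": 222222})   # True: not a self packet
f.selfCheck({"fromId": 111111, "toId": 111111})   # False: self packet, ignored
```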
|
||||
|
||||
### *class* Sponge.Protocols.bubble.filter(completeMessage, recipient, recipientNode, onodeID, todo)
|
||||
|
||||
Peer to peer protocol
|
||||
|
||||
### *class* Sponge.Protocols.catch.filter(completeMessage, recipient, recipientNode, todo)
|
||||
|
||||
Catch exchange protocol
|
||||
|
||||
### *class* Sponge.Protocols.cryptography.filter(completeMessage, recipientNode, todo)
|
||||
|
||||
Cryptographic operations protocol
|
||||
|
||||
### *class* Sponge.Protocols.map.filter(completeMessage, todo)
|
||||
|
||||
Network mapping protocol
|
||||
|
|
|
@ -1,3 +0,0 @@
|
|||
# Layer 0 data transmission
|
||||
|
||||
### *class* Transmission.transmission.Transmitter(device, filter, onodeID, cache, catch, cryptographyInfo, cLog)
|
docs/readme.md (116 lines changed)
|
@ -7,7 +7,7 @@ sphinx-quickstart on Fri Jul 26 23:30:55 2024. -->
|
|||
|
||||
# Contents:
|
||||
|
||||
* [PierMesh service runner](/PierMesh/piermesh/src/branch/main/docs/run.md)
|
||||
* [run: PierMesh service runner](/PierMesh/piermesh/src/branch/main/docs/run.md)
|
||||
* [`Node`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node)
|
||||
* [`Node.toLog`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.toLog)
|
||||
* [`Node.actions`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.actions)
|
||||
|
@ -29,7 +29,7 @@ sphinx-quickstart on Fri Jul 26 23:30:55 2024. -->
|
|||
* [`Node.cLog()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.cLog)
|
||||
* [`Node.monitor()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.monitor)
|
||||
* [`Node.spongeListen()`](/PierMesh/piermesh/src/branch/main/docs/run.md#run.Node.spongeListen)
|
||||
* [TUI application](/PierMesh/piermesh/src/branch/main/docs/ui.md)
|
||||
* [ui: TUI application](/PierMesh/piermesh/src/branch/main/docs/ui.md)
|
||||
* [`TUI`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI)
|
||||
* [`TUI.visibleLogo`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI.visibleLogo)
|
||||
* [`TUI.nodeOb`](/PierMesh/piermesh/src/branch/main/docs/ui.md#ui.TUI.nodeOb)
|
||||
|
@ -59,36 +59,110 @@ sphinx-quickstart on Fri Jul 26 23:30:55 2024. -->
|
|||
* [`Network.getRoute()`](/PierMesh/piermesh/src/branch/main/docs/Siph/map.md#Siph.map.Network.getRoute)
|
||||
* [`Network.mimport()`](/PierMesh/piermesh/src/branch/main/docs/Siph/map.md#Siph.map.Network.mimport)
|
||||
* [`Network.render()`](/PierMesh/piermesh/src/branch/main/docs/Siph/map.md#Siph.map.Network.render)
|
||||
* [Schemaless binary database](/PierMesh/piermesh/src/branch/main/docs/Components/daisy.md)
|
||||
* [`Daisy`](/PierMesh/piermesh/src/branch/main/docs/Components/daisy.md#Components.daisy.Daisy)
|
||||
* [`Daisy.get()`](/PierMesh/piermesh/src/branch/main/docs/Components/daisy.md#Components.daisy.Daisy.get)
|
||||
* [`Daisy.read()`](/PierMesh/piermesh/src/branch/main/docs/Components/daisy.md#Components.daisy.Daisy.read)
|
||||
* [`Daisy.sublist()`](/PierMesh/piermesh/src/branch/main/docs/Components/daisy.md#Components.daisy.Daisy.sublist)
|
||||
* [`Daisy.write()`](/PierMesh/piermesh/src/branch/main/docs/Components/daisy.md#Components.daisy.Daisy.write)
|
||||
* [`Cache`](/PierMesh/piermesh/src/branch/main/docs/Components/daisy.md#Components.daisy.Cache)
|
||||
* [`Cache.create()`](/PierMesh/piermesh/src/branch/main/docs/Components/daisy.md#Components.daisy.Cache.create)
|
||||
* [`Cache.get()`](/PierMesh/piermesh/src/branch/main/docs/Components/daisy.md#Components.daisy.Cache.get)
|
||||
* [`Cache.refresh()`](/PierMesh/piermesh/src/branch/main/docs/Components/daisy.md#Components.daisy.Cache.refresh)
|
||||
* [`Cache.search()`](/PierMesh/piermesh/src/branch/main/docs/Components/daisy.md#Components.daisy.Cache.search)
|
||||
* [`Catch`](/PierMesh/piermesh/src/branch/main/docs/Components/daisy.md#Components.daisy.Catch)
|
||||
* [`Catch.get()`](/PierMesh/piermesh/src/branch/main/docs/Components/daisy.md#Components.daisy.Catch.get)
|
||||
* [`Catch.sget()`](/PierMesh/piermesh/src/branch/main/docs/Components/daisy.md#Components.daisy.Catch.sget)
|
||||
* [Small internet interop utilities](/PierMesh/piermesh/src/branch/main/docs/Components/hopper.md)
|
||||
* [hopper: Small internet interop utilities](/PierMesh/piermesh/src/branch/main/docs/Components/hopper.md)
|
||||
* [`get()`](/PierMesh/piermesh/src/branch/main/docs/Components/hopper.md#Components.hopper.get)
|
||||
* [`post()`](/PierMesh/piermesh/src/branch/main/docs/Components/hopper.md#Components.hopper.post)
|
||||
* [`DHEFern`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/DHEFern.md)
|
||||
* [Daisy based cache](/PierMesh/piermesh/src/branch/main/docs/Daisy/Cache.md)
|
||||
* [`Cache`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Cache.md#Daisy.Cache.Cache)
|
||||
* [`Cache.create()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Cache.md#Daisy.Cache.Cache.create)
|
||||
* [`Cache.get()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Cache.md#Daisy.Cache.Cache.get)
|
||||
* [`Cache.refresh()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Cache.md#Daisy.Cache.Cache.refresh)
|
||||
* [`Cache.search()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Cache.md#Daisy.Cache.Cache.search)
|
||||
* [`Catch`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Catch.md)
|
||||
* [`Catch.get()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Catch.md#Daisy.Catch.Catch.get)
|
||||
* [`Catch.sget()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Catch.md#Daisy.Catch.Catch.sget)
|
||||
* [`Daisy`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Daisy.md)
|
||||
* [`Daisy.filepath`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Daisy.md#Daisy.Daisy.Daisy.filepath)
|
||||
* [`Daisy.msg`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Daisy.md#Daisy.Daisy.Daisy.msg)
|
||||
* [`Daisy.get()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Daisy.md#Daisy.Daisy.Daisy.get)
|
||||
* [`Daisy.read()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Daisy.md#Daisy.Daisy.Daisy.read)
|
||||
* [`Daisy.sublist()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Daisy.md#Daisy.Daisy.Daisy.sublist)
|
||||
* [`Daisy.write()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Daisy.md#Daisy.Daisy.Daisy.write)
|
||||
* [Daisy signal management](/PierMesh/piermesh/src/branch/main/docs/Daisy/Soil.md)
|
||||
* [`Compound`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Soil.md#Daisy.Soil.Compound)
|
||||
* [`Compound.on_any_event()`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Soil.md#Daisy.Soil.Compound.on_any_event)
|
||||
* [`Store`](/PierMesh/piermesh/src/branch/main/docs/Daisy/Store.md)
|
||||
* [`DHEFern`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md)
|
||||
* [`DHEFern.cLog`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.cLog)
|
||||
* [`DHEFern.loadedParams`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.loadedParams)
|
||||
* [`DHEFern.loadedKeys`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.loadedKeys)
|
||||
* [`DHEFern.nodeNickname`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.nodeNickname)
|
||||
* [`DHEFern.cache`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.cache)
|
||||
* [`DHEFern.publicKey`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.publicKey)
|
||||
* [`DHEFern.privateKey`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.privateKey)
|
||||
* [`DHEFern.checkInMem()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.checkInMem)
|
||||
* [`DHEFern.decrypt()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.decrypt)
|
||||
* [`DHEFern.encrypt()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.encrypt)
|
||||
* [`DHEFern.genKeyPair()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.genKeyPair)
|
||||
* [`DHEFern.genParams()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.genParams)
|
||||
* [`DHEFern.getParamsBytes()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.getParamsBytes)
|
||||
* [`DHEFern.getRecord()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.getRecord)
|
||||
* [`DHEFern.getSalt()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.getSalt)
|
||||
* [`DHEFern.initStore()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.initStore)
|
||||
* [`DHEFern.keyDerive()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.keyDerive)
|
||||
* [`DHEFern.loadParamBytes()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.loadParamBytes)
|
||||
* [`DHEFern.loadRecordToMem()`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/WhaleSong.md#Cryptography.WhaleSong.DHEFern.loadRecordToMem)
|
||||
* [`Filter`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md)
|
||||
* [`Filter.cache`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.cache)
|
||||
* [`Filter.mCheck()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.mCheck)
|
||||
* [`Filter.protoMap()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.protoMap)
|
||||
* [`Filter.protoRoute()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.protoRoute)
|
||||
* [`Filter.selfCheck()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.selfCheck)
|
||||
* [`Filter.sieve()`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.base.Filter.sieve)
|
||||
* [`filter`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.Protocols.bubble.filter)
|
||||
* [`filter`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.Protocols.catch.filter)
|
||||
* [`filter`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.Protocols.cryptography.filter)
|
||||
* [`filter`](/PierMesh/piermesh/src/branch/main/docs/Sponge/base.md#Sponge.Protocols.map.filter)
|
||||
* [Header packet: Metadata packet](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md)
|
||||
* [`Header`](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md#Packets.HeaderPacket.Header)
|
||||
* [`Header.sender`](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md#Packets.HeaderPacket.Header.sender)
|
||||
* [`Header.senderDisplayName`](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md#Packets.HeaderPacket.Header.senderDisplayName)
|
||||
* [`Header.recipient`](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md#Packets.HeaderPacket.Header.recipient)
|
||||
* [`Header.recipientNode`](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md#Packets.HeaderPacket.Header.recipientNode)
|
||||
* [`Header.subpacket`](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md#Packets.HeaderPacket.Header.subpacket)
|
||||
* [`Header.wantFullResponse`](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md#Packets.HeaderPacket.Header.wantFullResponse)
|
||||
* [`Header.pAction`](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md#Packets.HeaderPacket.Header.pAction)
|
||||
* [`Header.dump()`](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md#Packets.HeaderPacket.Header.dump)
|
||||
* [`Header.usePreset()`](/PierMesh/piermesh/src/branch/main/docs/Packets/HeaderPacket.md#Packets.HeaderPacket.Header.usePreset)
|
||||
* [Base packet](/PierMesh/piermesh/src/branch/main/docs/Packets/Packet.md)
|
||||
* [`Packet`](/PierMesh/piermesh/src/branch/main/docs/Packets/Packet.md#Packets.Packet.Packet)
|
||||
* [`Packets`](/PierMesh/piermesh/src/branch/main/docs/Packets/Packets.md)
|
||||
* [`Packet.data`](/PierMesh/piermesh/src/branch/main/docs/Packets/Packet.md#Packets.Packet.Packet.data)
|
||||
* [`Packet.packetsID`](/PierMesh/piermesh/src/branch/main/docs/Packets/Packet.md#Packets.Packet.Packet.packetsID)
|
||||
* [`Packet.packetNumber`](/PierMesh/piermesh/src/branch/main/docs/Packets/Packet.md#Packets.Packet.Packet.packetNumber)
|
||||
* [`Packet.packetCount`](/PierMesh/piermesh/src/branch/main/docs/Packets/Packet.md#Packets.Packet.Packet.packetCount)
|
||||
* [`Packet.packetsClass`](/PierMesh/piermesh/src/branch/main/docs/Packets/Packet.md#Packets.Packet.Packet.packetsClass)
|
||||
* [`Packet.dump()`](/PierMesh/piermesh/src/branch/main/docs/Packets/Packet.md#Packets.Packet.Packet.dump)
|
||||
* [`Packet.parsePayload()`](/PierMesh/piermesh/src/branch/main/docs/Packets/Packet.md#Packets.Packet.Packet.parsePayload)
|
||||
* [`SinglePacket`](/PierMesh/piermesh/src/branch/main/docs/Packets/SinglePacket.md)
|
||||
* [Layer 0 data transmission](/PierMesh/piermesh/src/branch/main/docs/Transmission/transmission.md)
|
||||
* [`Transmitter`](/PierMesh/piermesh/src/branch/main/docs/Transmission/transmission.md#Transmission.transmission.Transmitter)
|
||||
* [Layer 0 data transceiving](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md)
|
||||
* [`Transceiver`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver)
|
||||
* [`Transceiver.cLog`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.cLog)
|
||||
* [`Transceiver.cryptographyInfo`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.cryptographyInfo)
|
||||
* [`Transceiver.filter`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.filter)
|
||||
* [`Transceiver.tcache`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.tcache)
|
||||
* [`Transceiver.tcatch`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.tcatch)
|
||||
* [`Transceiver.notConnected`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.notConnected)
|
||||
* [`Transceiver.acks`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.acks)
|
||||
* [`Transceiver.onodeID`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.onodeID)
|
||||
* [`Transceiver.messages`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.messages)
|
||||
* [`Transceiver.addPackets()`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.addPackets)
|
||||
* [`Transceiver.announce()`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.announce)
|
||||
* [`Transceiver.awaitFullResponse()`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.awaitFullResponse)
|
||||
* [`Transceiver.awaitResponse()`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.awaitResponse)
|
||||
* [`Transceiver.initNodeDH()`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.initNodeDH)
|
||||
* [`Transceiver.onConnection()`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.onConnection)
|
||||
* [`Transceiver.onReceive()`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.onReceive)
|
||||
* [`Transceiver.progressCheck()`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.progressCheck)
|
||||
* [`Transceiver.responseCheck()`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.responseCheck)
|
||||
* [`Transceiver.send()`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.send)
|
||||
* [`Transceiver.sendAnnounce()`](/PierMesh/piermesh/src/branch/main/docs/Transceiver/Transceiver.md#Transceiver.Transceiver.Transceiver.sendAnnounce)
|
||||
* [serve: Web UI server](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md)
|
||||
* [`Server`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server)
|
||||
* [`Server.cLog`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.cLog)
|
||||
* [`Server.transmitter`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.transmitter)
|
||||
* [`Server.network`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.network)
|
||||
* [`Server.nodeID`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.nodeID)
|
||||
* [`Server.peerIDs`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.peerIDs)
|
||||
* [`Server.app`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.app)
|
||||
* [`Server.catch`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.catch)
|
||||
* [`Server.sendToPeer()`](/PierMesh/piermesh/src/branch/main/docs/Splash/serve.md#Splash.serve.Server.sendToPeer)
|
||||
|
|
docs/run.md (20 lines changed)
|
@ -1,10 +1,8 @@
|
|||
# PierMesh service runner
|
||||
|
||||
Main method for running the PierMesh service
|
||||
# run: PierMesh service runner
|
||||
|
||||
### *class* run.Node
|
||||
|
||||
Node: Class that handles most of the PierMesh data
|
||||
Class that handles most of the PierMesh data
|
||||
|
||||
[🔗 Source](https://git.utopic.work/PierMesh/piermesh/src/branch/main/src/run.py)
|
||||
|
||||
|
@ -55,7 +53,7 @@ Daisy cache for general use
|
|||
Daisy (Components.daisy.Daisy) record containing some information about the node
|
||||
|
||||
* **Type:**
|
||||
[Daisy](/PierMesh/piermesh/src/branch/main/docs/Components/daisy.md#Components.daisy.Daisy)
|
||||
[Daisy](/PierMesh/piermesh/src/branch/main/docs/Daisy/Daisy.md#Daisy.Daisy.Daisy)
|
||||
|
||||
#### onodeID
|
||||
|
||||
|
@ -69,7 +67,7 @@ PierMesh node ID
|
|||
LoRa transmitter (Transmitter)
|
||||
|
||||
* **Type:**
|
||||
[Transmitter](/PierMesh/piermesh/src/branch/main/docs/Transmission/transmission.md#Transmission.transmission.Transmitter)
|
||||
Transmitter
|
||||
|
||||
#### processed
|
||||
|
||||
|
@ -101,7 +99,7 @@ Dictionary of PierMesh service tasks
|
|||
Initialize diffie hellman key exchange
|
||||
|
||||
#### SEE ALSO
|
||||
[`Cryptography.DHEFern.DHEFern`](/PierMesh/piermesh/src/branch/main/docs/Cryptography/DHEFern.md#Cryptography.DHEFern.DHEFern)
|
||||
`Cryptography.DHEFern.DHEFern`
|
||||
: End to end encryption functionality
|
||||
|
||||
#### *async* action_keyDeriveDH(data: dict)
|
||||
|
@ -120,10 +118,6 @@ Map new network data to internal network map
|
|||
|
||||
Get catch and return the data to a peer
|
||||
|
||||
#### SEE ALSO
|
||||
`Bubble.router.Router`
|
||||
: Routing class
|
||||
|
||||
#### *async* action_sendToPeer(data: dict)
|
||||
|
||||
Send data to a peer connected to the server
|
||||
|
@ -132,7 +126,7 @@ Send data to a peer connected to the server
|
|||
**data** (*dict*) – Data passed from the filter, this is a generic object so it’s similar on all actions here
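
The todo entries that the Sponge filters hand to these actions are plain dictionaries keyed by the action name; a hypothetical example of the shape (the field names inside "data" are illustrative, not confirmed by the source):

```python
# Appended by a Sponge protocol filter, later consumed by the Node's action loop
todo.append(
    {
        "action": "sendToPeer",      # dispatched to Node.action_sendToPeer
        "data": {
            "recipient": 123456,     # hypothetical payload fields
            "html": "<p>hello</p>",
        },
    }
)
```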
|
||||
|
||||
#### SEE ALSO
|
||||
`Filters.Protocols`
|
||||
`Sponge.Protocols`
|
||||
: Protocol based packet filtering
|
||||
|
||||
`webui.serve.Server`
|
||||
|
@ -160,7 +154,7 @@ Monitor and log ram and cpu usage
|
|||
Loop to watch for tasks to do
|
||||
|
||||
#### SEE ALSO
|
||||
`Filters.base.sieve`
|
||||
`Sponge.base.sieve`
|
||||
: Packet filtering/parsing
|
||||
|
||||
### Notes
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# TUI application
|
||||
# ui: TUI application
|
||||
|
||||
### *class* ui.TUI(driver_class: Type[Driver] | None = None, css_path: str | PurePath | List[str | PurePath] | None = None, watch_css: bool = False)
|
||||
|
||||
|
|
|
@ -1,7 +0,0 @@
|
|||
import random
|
||||
|
||||
class Client:
|
||||
def __init__(self, nodeID, permissions=False):
|
||||
self.cid = random.randrange(1, 1000000)
|
||||
self.nodeID = nodeID
|
||||
|
|
@ -1,423 +0,0 @@
|
|||
import os
|
||||
import json
|
||||
import msgpack
|
||||
import Cryptography
|
||||
import random
|
||||
from watchdog.observers import Observer
|
||||
from watchdog.events import FileSystemEventHandler
|
||||
import logging
|
||||
|
||||
# TODO: delete
|
||||
# TODO: propagate json changes to msgpack automatically
|
||||
# TODO: propagate msgpack changes to cache automatically
|
||||
# TODO: Indexing
|
||||
|
||||
|
||||
def _json_to_msg(path: str):
|
||||
"""
|
||||
Convert json at the path plus .json to a msgpack binary
|
||||
|
||||
Parameters
|
||||
----------
|
||||
path: str
|
||||
Path to json minus the extension
|
||||
"""
|
||||
rpath = path + ".json"
|
||||
res = ""
|
||||
with open(rpath) as f:
|
||||
res = msgpack.dumps(json.load(f))
|
||||
with open(path, "wb") as f:
|
||||
f.write(res)
|
||||
|
||||
|
||||
class Daisy:
|
||||
"""
|
||||
Base class for Daisy data representation
|
||||
|
||||
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Components/daisy.py>`_
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
filepath: str,
|
||||
templates: dict = {},
|
||||
template: bool = False,
|
||||
prefillDict: bool = False,
|
||||
):
|
||||
"""
|
||||
Parameters
|
||||
----------
|
||||
filepath: str
|
||||
Path to disk location
|
||||
|
||||
templates: dict
|
||||
Dictionary of templates to use
|
||||
|
||||
template: bool
|
||||
Which template to use (a key into templates), or False for none
|
||||
|
||||
prefillDict: bool
|
||||
Dictionary of values to prefill the record with, or False
|
||||
"""
|
||||
self.filepath = filepath
|
||||
if os.path.exists(filepath) != True:
|
||||
with open(filepath, "wb") as f:
|
||||
if template != False:
|
||||
if template in templates.keys():
|
||||
t = templates[template].get()
|
||||
if prefillDict != False:
|
||||
for k in prefillDict.keys():
|
||||
t[k] = prefillDict[k]
|
||||
f.write(msgpack.dumps(t))
|
||||
self.msg = t
|
||||
else:
|
||||
print("No such template as: " + template)
|
||||
else:
|
||||
f.write(msgpack.dumps({}))
|
||||
self.msg = {}
|
||||
elif os.path.isdir(filepath):
|
||||
self.msg = "directory"
|
||||
else:
|
||||
with open(filepath, "rb") as f:
|
||||
self.msg = msgpack.loads(f.read())
|
||||
|
||||
# Use override for updating
|
||||
|
||||
def write(
|
||||
self,
|
||||
override=False,
|
||||
encrypt: bool = False,
|
||||
encryptKey=None,
|
||||
recur: bool = False,
|
||||
):
|
||||
"""
|
||||
Write record to disk
|
||||
|
||||
Parameters
|
||||
----------
|
||||
override
|
||||
Either False or a dictionary of values to set on the record
|
||||
|
||||
encrypt: bool
|
||||
Whether to encrypt the record (TODO)
|
||||
|
||||
encryptKey
|
||||
Key to encrypt record with, or None if not set
|
||||
|
||||
recur: bool
|
||||
Whether to recursively handle keys
|
||||
"""
|
||||
if override != False:
|
||||
for key in override.keys():
|
||||
# TODO: Deeper recursion
|
||||
if recur:
|
||||
if not key in self.msg.keys():
|
||||
self.msg[key] = {}
|
||||
for ikey in override[key].keys():
|
||||
self.msg[key][ikey] = override[key][ikey]
|
||||
else:
|
||||
self.msg[key] = override[key]
|
||||
data = msgpack.dumps(self.msg)
|
||||
with open(self.filepath, "wb") as f:
|
||||
f.write(data)
|
||||
|
||||
# Use for refreshing
|
||||
|
||||
def read(self, decrypt: bool = False, decryptKey=False):
|
||||
"""
|
||||
Read record from disk to memory
|
||||
|
||||
Parameters
|
||||
----------
|
||||
decrypt: bool
|
||||
Whether to decrypt record
|
||||
|
||||
decryptKey
|
||||
Key to decrypt record
|
||||
"""
|
||||
if os.path.isdir(self.filepath):
|
||||
self.msg = "directory"
|
||||
else:
|
||||
with open(self.filepath, "rb") as f:
|
||||
self.msg = msgpack.loads(f.read())
|
||||
|
||||
def get(self):
|
||||
"""
|
||||
Get record dictionary from memory
|
||||
|
||||
Returns
|
||||
-------
|
||||
self.msg: dict
|
||||
"""
|
||||
return self.msg
|
||||
|
||||
def sublist(self):
|
||||
"""
|
||||
List the contents of the directory if this record is a directory, otherwise return None
|
||||
"""
|
||||
fpath = self.filepath
|
||||
if os.path.isdir(fpath):
|
||||
return ["messages/" + x for x in os.listdir(fpath)]
|
||||
else:
|
||||
return None
|
||||
|
||||
|
||||
def loadTemplates(templatePath: str = "templates"):
|
||||
"""Load templates for prefilling records
|
||||
|
||||
Parameters
|
||||
----------
|
||||
templatePath: str
|
||||
Path to templates
|
||||
"""
|
||||
templates = {}
|
||||
for p in os.listdir(templatePath):
|
||||
p = templatePath + "/" + p
|
||||
if os.path.isdir(p):
|
||||
for ip in os.listdir(p):
|
||||
ip = p + "/" + ip
|
||||
if os.path.isdir(ip):
|
||||
print("Too deep, skipping: " + ip)
|
||||
else:
|
||||
templates[ip] = Daisy(ip)
|
||||
else:
|
||||
templates[p] = Daisy(p)
|
||||
|
||||
return templates
|
||||
|
||||
|
||||
class CFSHandler(FileSystemEventHandler):
|
||||
"""
|
||||
File system watchdog that propagates disk changes to records to their proper cache
|
||||
"""
|
||||
|
||||
def __init__(self, cache, isCatch: bool = False):
|
||||
"""
|
||||
Parameters
|
||||
----------
|
||||
cache: Cache
|
||||
Daisy cache to update
|
||||
|
||||
isCatch: bool
|
||||
Whether the cache is for catches
|
||||
"""
|
||||
self.cache = cache
|
||||
self.isCatch = isCatch
|
||||
super().__init__()
|
||||
|
||||
def on_any_event(self, event):
|
||||
"""
|
||||
Called when a CRUD operation is performed on a record file
|
||||
|
||||
Parameters
|
||||
----------
|
||||
event
|
||||
Event object provided by watchdog
|
||||
"""
|
||||
if not (".json" in event.src_path):
|
||||
if not (".md" in event.src_path):
|
||||
tpath = "/".join(event.src_path.split("/")[1:])
|
||||
if tpath != "":
|
||||
if self.isCatch:
|
||||
self.cache.sget(tpath)
|
||||
else:
|
||||
self.cache.get(tpath).get()
|
||||
|
||||
|
||||
# TODO: Dumping to cacheFile
|
||||
|
||||
|
||||
class Cache:
|
||||
"""
|
||||
In memory collection of Daisy records
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
filepaths=None,
|
||||
cacheFile=None,
|
||||
path: str = "daisy",
|
||||
walk: bool = False,
|
||||
isCatch: bool = False,
|
||||
):
|
||||
"""
|
||||
Parameters
|
||||
----------
|
||||
filepaths
|
||||
Either a list of filepaths to load or None
|
||||
|
||||
cacheFile
|
||||
Path to a cache file which is a collection of paths to load
|
||||
|
||||
path: str
|
||||
Path prefix to load records from
|
||||
|
||||
walk: bool
|
||||
Whether to automatically walk the path and load records
|
||||
|
||||
isCatch: bool
|
||||
Whether this cache is for catches
|
||||
"""
|
||||
self.data = {}
|
||||
self.path = path
|
||||
self.event_handler = CFSHandler(self, isCatch=isCatch)
|
||||
self.observer = Observer()
|
||||
self.observer.schedule(self.event_handler, self.path, recursive=True)
|
||||
self.observer.start()
|
||||
# TODO: Test
|
||||
|
||||
if filepaths != None:
|
||||
for fp in filepaths:
|
||||
fp = path + "/" + fp
|
||||
if os.path.isfile(fp):
|
||||
self.data[fp] = Daisy(fp)
|
||||
elif cacheFile != None:
|
||||
with open(cacheFile, "r") as f:
|
||||
for fp in f.read().split("\n"):
|
||||
self.data[fp] = Daisy(fp)
|
||||
elif walk:
|
||||
for root, dirs, files in os.walk(self.path):
|
||||
for p in dirs + files:
|
||||
# print("walking")
|
||||
if not (".json" in p):
|
||||
if not (".md" in p):
|
||||
tpath = root + "/" + p
|
||||
# print(p)
|
||||
# print(tpath)
|
||||
self.data[tpath] = Daisy(tpath)
|
||||
|
||||
def create(self, path: str, data: dict):
|
||||
"""
|
||||
Create new record
|
||||
|
||||
Parameters
|
||||
----------
|
||||
path: str
|
||||
Path to create record at
|
||||
|
||||
data: dict
|
||||
Data to populate record with
|
||||
"""
|
||||
with open(self.path + "/" + path, "wb") as f:
|
||||
f.write(msgpack.dumps(data))
|
||||
logging.log(10, "Done creating record")
|
||||
self.data[path] = Daisy(self.path + "/" + path)
|
||||
logging.log(10, "Done loading to Daisy")
|
||||
return self.data[path]
|
||||
|
||||
def get(self, path: str):
|
||||
"""
|
||||
Get record at path, else return False
|
||||
|
||||
path: str
|
||||
Path of record
|
||||
"""
|
||||
if path in self.data.keys():
|
||||
return self.data[path]
|
||||
else:
|
||||
if os.path.exists(self.path + "/" + path):
|
||||
self.data[path] = Daisy(self.path + "/" + path)
|
||||
return self.data[path]
|
||||
else:
|
||||
logging.log(10, "File does not exist")
|
||||
return False
|
||||
|
||||
def refresh(self):
|
||||
"""
|
||||
Reload from disk to memory
|
||||
"""
|
||||
for key in self.data.keys():
|
||||
self.data[key].read()
|
||||
|
||||
def search(self, keydict: dict, strict: bool = True):
|
||||
"""
|
||||
Search the cache for records with matching values
|
||||
|
||||
keydict: dict
|
||||
Values to search for
|
||||
|
||||
strict: bool
|
||||
Whether all searched values must match (otherwise any single match is enough)
|
||||
"""
|
||||
results = []
|
||||
for key, val in self.data.items():
|
||||
val = val.get()
|
||||
if strict and type(val) != str:
|
||||
addcheck = False
|
||||
for k, v in keydict.items():
|
||||
if k in val.keys():
|
||||
if v in val[k]:
|
||||
addcheck = True
|
||||
else:
|
||||
addcheck = False
|
||||
break
|
||||
if addcheck:
|
||||
results.append([key, val])
|
||||
elif type(val) != str:
|
||||
for k, v in keydict.items():
|
||||
if k in val.keys():
|
||||
if v in val[k]:
|
||||
results.append([key, val])
|
||||
return results
|
||||
|
||||
|
||||
class Catch(Cache):
|
||||
"""
|
||||
Subclass of Cache for handling catches
|
||||
|
||||
.. image:: https://git.utopic.work/PierMesh/piermesh/raw/branch/main/imgs/catchdisplay.png
|
||||
"""
|
||||
|
||||
catches = {}
|
||||
|
||||
def __init__(
|
||||
self, path: str = "catch", filepaths=None, catchFile=None, walk: bool = False
|
||||
):
|
||||
"""
|
||||
Basically the same initialization parameters as Cache
|
||||
"""
|
||||
super().__init__(
|
||||
filepaths=filepaths, cacheFile=catchFile, path=path, walk=walk, isCatch=True
|
||||
)
|
||||
|
||||
# TODO: Fins
|
||||
|
||||
def sget(self, path: str):
|
||||
"""
|
||||
Call Cache's get to get record
|
||||
"""
|
||||
return super().get(path)
|
||||
|
||||
def get(self, head: str, tail: str, fins=None):
|
||||
"""
|
||||
Get catch by pieces
|
||||
|
||||
Parameters
|
||||
----------
|
||||
head: str
|
||||
First part of catch (maximum: 4 characters)
|
||||
|
||||
tail: str
|
||||
Second part of catch (maximum: 16 characters)
|
||||
|
||||
fins
|
||||
List of strings (maximum 8 characters each) at the end of the catch, or None if there are none
|
||||
"""
|
||||
r = self.search({"head": head, "tail": tail})
|
||||
return r[0][1]["html"]
|
||||
|
||||
def addc(self, peer, node, seperator, head, tail, data, fins=None):
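        """
        Create a catch record at catch/<node>/<peer>/<random 6 digit sid>, storing the
        separator, head, tail and optional fins alongside the supplied data, and
        return [sid, record]
        """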
|
||||
tnpath = "catch/" + node
|
||||
if os.path.exists(tnpath) != True:
|
||||
os.makedirs(tnpath)
|
||||
tppath = tnpath + "/" + peer
|
||||
if os.path.exists(tppath) != True:
|
||||
os.makedirs(tppath)
|
||||
sid = str(random.randrange(0, 999999)).zfill(6)
|
||||
data["seperator"] = seperator
|
||||
data["head"] = head
|
||||
data["tail"] = tail
|
||||
if fins != None:
|
||||
data["fins"] = fins
|
||||
res = self.create("{0}/{1}/{2}".format(node, peer, sid), data)
|
||||
return [sid, res]
|
|
@ -1,9 +0,0 @@
|
|||
Schemaless binary database
|
||||
==========================
|
||||
|
||||
.. autoclass:: Components.daisy.Daisy
|
||||
:members:
|
||||
.. autoclass:: Components.daisy.Cache
|
||||
:members:
|
||||
.. autoclass:: Components.daisy.Catch
|
||||
:members:
|
|
@ -1,7 +1,7 @@
|
|||
import requests
|
||||
import msgpack
|
||||
import lzma
|
||||
from Packets.Packets import Packets
|
||||
from Packets.Message import Message
|
||||
|
||||
|
||||
def get(url: str, params=None):
|
||||
|
@ -17,7 +17,7 @@ def get(url: str, params=None):
|
|||
"""
|
||||
r = requests.get(url, params=params)
|
||||
r = {"response": r.text, "code": r.status_code}
|
||||
return Packets(lzma.compress(msgpack.dumps(r))).get()
|
||||
return Message(lzma.compress(msgpack.dumps(r))).get()
|
||||
|
||||
|
||||
def post(url: str, params=None):
|
||||
|
@ -31,7 +31,6 @@ def post(url: str, params=None):
|
|||
params
|
||||
Requests (library) parameters
|
||||
"""
|
||||
r = requests.post(url, datan=params)
|
||||
r = requests.post(url, data=params)
|
||||
r = {"response": r.text, "code": r.status_code}
|
||||
return Packets(lzma.compress(msgpack.dumps(r))).get()
|
||||
|
||||
return Message(lzma.compress(msgpack.dumps(r))).get()
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
Small internet interop utilities
|
||||
================================
|
||||
hopper: Small internet interop utilities
|
||||
========================================
|
||||
|
||||
.. automodule:: Components.hopper
|
||||
:members:
|
||||
|
|
|
@ -1,206 +0,0 @@
|
|||
import base64
|
||||
import os
|
||||
from cryptography.fernet import Fernet
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
from cryptography.hazmat.primitives.asymmetric import dh
|
||||
from cryptography.hazmat.primitives.kdf.hkdf import HKDF
|
||||
from cryptography.hazmat.primitives.serialization import (
|
||||
Encoding,
|
||||
NoEncryption,
|
||||
ParameterFormat,
|
||||
PublicFormat,
|
||||
PrivateFormat,
|
||||
)
|
||||
import cryptography.hazmat.primitives.serialization as Serialization
|
||||
import msgpack
|
||||
|
||||
# TODO: Different store directories per node
|
||||
|
||||
|
||||
class DHEFern:
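    """
    Diffie-Hellman ephemeral key exchange with Fernet symmetric encryption,
    persisting parameters and derived keys per node through the Daisy cache
    """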
|
||||
def __init__(self, cache, nodeNickname, cLog):
|
||||
self.cLog = cLog
|
||||
self.loadedParams = {}
|
||||
self.loadedKeys = {}
|
||||
self.nodeNickname = nodeNickname
|
||||
self.cache = cache
|
||||
if self.cache.get("cryptography/{0}/paramStore".format(nodeNickname)) == False:
|
||||
self.initStore("param")
|
||||
else:
|
||||
self.params = self.loadParamBytes(
|
||||
self.cache.get(
|
||||
"cryptography/{0}/paramStore".format(nodeNickname)
|
||||
).get()["self"]
|
||||
)
|
||||
if self.cache.get("cryptography/{0}/keyStore".format(nodeNickname)) == False:
|
||||
self.initStore("key")
|
||||
self.genKeyPair()
|
||||
else:
|
||||
tks = self.cache.get("cryptography/{0}/keyStore".format(nodeNickname)).get()
|
||||
self.publicKey = tks["self"]["publicKey"]
|
||||
self.privateKey = tks["self"]["privateKey"]
|
||||
|
||||
def checkInMem(self, store, nodeID):
|
||||
if store == "param":
|
||||
return nodeID in self.loadedParams.keys()
|
||||
elif store == "key":
|
||||
return nodeID in self.loadedKeys.keys()
|
||||
|
||||
def loadRecordToMem(self, store, nodeID):
|
||||
r = self.getRecord(store, nodeID)
|
||||
if r == False:
|
||||
self.cLog(
|
||||
30, "Tried to load nonexistent {0} for node {1}".format(store, nodeID)
|
||||
)
|
||||
return False
|
||||
elif self.checkInMem(store, nodeID):
|
||||
self.cLog(10, "{0}s already deserialized, skipping".format(store))
|
||||
else:
|
||||
if store == "param":
|
||||
self.loadedParams[nodeID] = self.loadParamBytes(r)
|
||||
elif store == "key":
|
||||
self.loadedKeys[nodeID] = {
|
||||
"publicKey": Serialization.load_pem_public_key(r["publicKey"]),
|
||||
"privateKey": Serialization.load_pem_private_key(
|
||||
r["privateKey"], None
|
||||
),
|
||||
}
|
||||
return True
|
||||
|
||||
# TODO: Store class daisy
|
||||
#
|
||||
def getRecord(self, store, key):
|
||||
r = self.cache.get(
|
||||
"cryptography/{0}/{1}Store".format(self.nodeNickname, store)
|
||||
).get()
|
||||
if r == False:
|
||||
self.cLog(20, "Record does not exist")
|
||||
return False
|
||||
else:
|
||||
if key in r.keys():
|
||||
return r[key]
|
||||
else:
|
||||
self.cLog(20, "Record does not exist")
|
||||
return False
|
||||
|
||||
def initStore(self, store):
|
||||
if not os.path.exists("daisy/cryptography/" + self.nodeNickname):
|
||||
os.mkdir("daisy/cryptography/" + self.nodeNickname)
|
||||
if store == "param":
|
||||
self.genParams()
|
||||
self.cache.create(
|
||||
"cryptography/{0}/paramStore".format(self.nodeNickname),
|
||||
{"self": self.getParamsBytes()},
|
||||
)
|
||||
elif store == "key":
|
||||
self.cache.create(
|
||||
"cryptography/{0}/keyStore".format(self.nodeNickname), {"self": {}}
|
||||
)
|
||||
else:
|
||||
self.cLog(30, "Store not defined")
|
||||
|
||||
def updateStore(self, store, entry, data, recur=True):
|
||||
self.cache.get(
|
||||
"cryptography/" + self.nodeNickname + "/" + store + "Store"
|
||||
).write(override={entry: data}, recur=recur)
|
||||
|
||||
def genParams(self):
|
||||
params = dh.generate_parameters(generator=2, key_size=2048)
|
||||
self.params = params
|
||||
return params
|
||||
|
||||
def genKeyPair(self, paramsOverride=False, setSelf=True):
|
||||
privateKey = self.params.generate_private_key()
|
||||
if setSelf:
|
||||
self.privateKey = privateKey
|
||||
publicKey = privateKey.public_key()
|
||||
if setSelf:
|
||||
self.publicKey = publicKey
|
||||
self.updateStore(
|
||||
"key",
|
||||
"self",
|
||||
{
|
||||
"publicKey": self.publicKey.public_bytes(
|
||||
Encoding.PEM, PublicFormat.SubjectPublicKeyInfo
|
||||
),
|
||||
"privateKey": self.privateKey.private_bytes(
|
||||
Encoding.PEM, PrivateFormat.PKCS8, NoEncryption()
|
||||
),
|
||||
},
|
||||
)
|
||||
return [privateKey, publicKey]
|
||||
else:
|
||||
publicKey = publicKey.public_bytes(
|
||||
Encoding.PEM, PublicFormat.SubjectPublicKeyInfo
|
||||
)
|
||||
privateKey = privateKey.private_bytes(
|
||||
Encoding.PEM, PrivateFormat.PKCS8, NoEncryption()
|
||||
)
|
||||
return [privateKey, publicKey]
|
||||
|
||||
def keyDerive(self, pubKey, salt, nodeID, params):
|
||||
if self.checkInMem("param", nodeID) == False:
|
||||
if self.getRecord("param", nodeID) == False:
|
||||
self.updateStore("param", nodeID, params, recur=False)
|
||||
self.loadRecordToMem("param", nodeID)
|
||||
self.cLog(20, "Precheck done for key derivation")
|
||||
|
||||
# TODO: Load them and if private key exists load it, otherwise generate a private key
|
||||
if self.checkInMem("key", nodeID) == False:
|
||||
if self.getRecord("key", nodeID) == False:
|
||||
privateKey, publicKey = self.genKeyPair(setSelf=False)
|
||||
self.updateStore(
|
||||
"key", nodeID, {"publicKey": publicKey, "privateKey": privateKey}
|
||||
)
|
||||
self.loadRecordToMem("key", nodeID)
|
||||
|
||||
sharedKey = self.loadedKeys[nodeID]["privateKey"].exchange(
|
||||
Serialization.load_pem_public_key(pubKey)
|
||||
)
|
||||
# Perform key derivation.
|
||||
self.cLog(20, "Performing key derivation")
|
||||
derivedKey = HKDF(
|
||||
algorithm=hashes.SHA256(), length=32, salt=salt, info=b"handshake data"
|
||||
).derive(sharedKey)
|
||||
self.cLog(20, "Derived key")
|
||||
ederivedKey = base64.urlsafe_b64encode(derivedKey)
|
||||
tr = self.getRecord("key", nodeID)
|
||||
tr["derivedKey"] = ederivedKey
|
||||
self.updateStore("key", nodeID, tr)
|
||||
self.cLog(20, "Done with cryptography store updates")
|
||||
return ederivedKey
|
||||
|
||||
def getSalt(self):
|
||||
return os.urandom(16)
|
||||
|
||||
def encrypt(self, data, nodeID, isDict=True):
|
||||
r = self.getRecord("key", nodeID)
|
||||
if r == False:
|
||||
self.cLog(20, "Node {0} not in keystore".format(nodeID))
|
||||
return False
|
||||
else:
|
||||
derivedKey = r["derivedKey"]
|
||||
fernet = Fernet(derivedKey)
|
||||
if isDict:
|
||||
data = msgpack.dumps(data)
|
||||
token = fernet.encrypt(data)
|
||||
return token
|
||||
|
||||
def decrypt(self, data, nodeID):
|
||||
r = self.getRecord("key", nodeID)
|
||||
if r == False:
|
||||
self.cLog(20, "No record of node " + nodeID)
|
||||
return False
|
||||
elif not "derivedKey" in r.keys():
|
||||
self.cLog(20, "No key derived for node " + nodeID)
|
||||
return False
|
||||
else:
|
||||
fernet = Fernet(self.getRecord("key", nodeID)["derivedKey"])
|
||||
return msgpack.loads(fernet.decrypt(data))
|
||||
|
||||
def getParamsBytes(self):
|
||||
return self.params.parameter_bytes(Encoding.PEM, ParameterFormat.PKCS3)
|
||||
|
||||
def loadParamBytes(self, pemBytes):
|
||||
self.params = Serialization.load_pem_parameters(pemBytes)
|
||||
return self.params
|
|
@ -1,7 +0,0 @@
|
|||
|
||||
Diffie hellman ephemeral
|
||||
Fernet based encryption
|
||||
==========================
|
||||
|
||||
.. autoclass:: Cryptography.DHEFern.DHEFern
|
||||
:members:
|
|
@ -1,24 +1,48 @@
|
|||
from Packets.Packet import Packet
|
||||
import Components.daisy as d
|
||||
from Daisy.Daisy import Daisy
|
||||
import msgpack
|
||||
|
||||
|
||||
class Header(Packet):
|
||||
"""
|
||||
Metadata packet for messages
|
||||
|
||||
Attributes
|
||||
----------
|
||||
sender: int
|
||||
6 digit (maximum) node or peer ID
|
||||
|
||||
senderDisplayName: int
|
||||
3 digit (maximum) ID for mapping display names to a given user
|
||||
|
||||
recipient: int
|
||||
6 digit (maximum) node or peer ID
|
||||
|
||||
recipientNode: int
|
||||
6 digit (maximum) node ID to route the packet to
|
||||
|
||||
subpacket: bool
|
||||
Whether this is a subpacket
|
||||
|
||||
wantFullResponse: bool
|
||||
Whether a response should be sent when the message completes reception (TODO)
|
||||
|
||||
pAction: int
|
||||
3 digit (maximum) pAction ID for mapping precise actions within a protocol (TODO)
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
packetsID,
|
||||
packetCount,
|
||||
sender,
|
||||
senderDisplayName,
|
||||
recipient,
|
||||
recipientNode,
|
||||
json=True,
|
||||
fname=False,
|
||||
subpacket=False,
|
||||
wantFullResponse=False,
|
||||
mimeType=-1,
|
||||
protocol=None,
|
||||
packetsClass=0,
|
||||
packetsID: int,
|
||||
packetCount: int,
|
||||
sender: int,
|
||||
senderDisplayName: int,
|
||||
recipient: int,
|
||||
recipientNode: int,
|
||||
subpacket: bool = False,
|
||||
wantFullResponse: bool = False,
|
||||
packetsClass: int = 0,
|
||||
pAction: int = -1,
|
||||
):
|
||||
super().__init__(
|
||||
"", packetsID=packetsID, packetCount=packetCount, packetsClass=packetsClass
|
||||
|
@ -27,27 +51,30 @@ class Header(Packet):
|
|||
self.senderDisplayName = senderDisplayName
|
||||
self.recipient = recipient
|
||||
self.recipientNode = recipientNode
|
||||
self.json = json
|
||||
self.fname = fname
|
||||
self.subpacket = subpacket
|
||||
self.wantFullResponse = wantFullResponse
|
||||
self.mimeType = mimeType
|
||||
self.pAction = pAction
|
||||
|
||||
def usePreset(self, path):
|
||||
preset = d.Daisy(path)
|
||||
def usePreset(self, path: str):
|
||||
"""
|
||||
Add preset fields to the packet
|
||||
"""
|
||||
preset = Daisy(path)
|
||||
for key in preset.get().keys():
|
||||
self.msg[key] = preset.get()[key]
|
||||
|
||||
def dump(self):
|
||||
"""
|
||||
Dump packet to msgpack encoded binary for transmission
|
||||
"""
|
||||
res = msgpack.loads(super().dump())
|
||||
res["sender"] = self.sender
|
||||
res["senderDisplayName"] = self.senderDisplayName
|
||||
res["recipient"] = self.recipient
|
||||
res["recipientNode"] = self.recipientNode
|
||||
# res["json"] = self.json
|
||||
# res["fname"] = self.fname
|
||||
res["subpacket"] = self.subpacket
|
||||
res["wantFullResponse"] = self.wantFullResponse
|
||||
res["mimeType"] = self.mimeType
|
||||
# res["protocol"] = self.protocol
|
||||
res["packetsClass"] = self.packetsClass
|
||||
res["pAction"] = self.pAction
|
||||
|
||||
return msgpack.dumps(res)
|
||||
|
|
|
@ -1,26 +1,37 @@
|
|||
import lzma, sys
|
||||
from Components.daisy import Daisy
|
||||
import lzma
|
||||
import msgpack
|
||||
import logging
|
||||
|
||||
|
||||
class Packet:
|
||||
def parsePayload(data):
|
||||
msg = msgpack.loads(data)
|
||||
return [
|
||||
msg["packetsID"],
|
||||
msg["packetNumber"],
|
||||
lzma.decompress(msg["data"]),
|
||||
msg["packetsClass"],
|
||||
]
|
||||
"""
|
||||
Base class for Packets
|
||||
|
||||
Attributes
|
||||
----------
|
||||
data: bytes
|
||||
Data part this packet is delivering (usually none when creating a header packet)
|
||||
|
||||
packetsID: int
|
||||
6 digit (maximum) ID for parent Message (TODO: Change to MessageID)
|
||||
|
||||
packetNumber
|
||||
Index of packet or False
|
||||
|
||||
packetCount: int
|
||||
Number of packets in parent Message
|
||||
|
||||
packetsClass: int
|
||||
3 digit (maximum) class/protocol for the message (TODO: Change to messageProtocol)
|
||||
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
data,
|
||||
packetsID=False,
|
||||
data: bytes,
|
||||
packetsID: int = -1,
|
||||
packetNumber=False,
|
||||
packetCount=1,
|
||||
packetsClass=-1,
|
||||
packetCount: int = 1,
|
||||
packetsClass: int = -1,
|
||||
):
|
||||
if packetsID == False:
|
||||
self.packetsID, self.packetNumber, self.data, self.packetsClass = (
|
||||
|
@ -40,7 +51,22 @@ class Packet:
|
|||
for key in edata.get().keys():
|
||||
self.data[key] = edata.get()[key]"""
|
||||
|
||||
def parsePayload(data):
|
||||
"""
|
||||
Parse bytes to Packet (TODO: Fix)
|
||||
"""
|
||||
msg = msgpack.loads(data)
|
||||
return [
|
||||
msg["packetsID"],
|
||||
msg["packetNumber"],
|
||||
lzma.decompress(msg["data"]),
|
||||
msg["packetsClass"],
|
||||
]
|
||||
|
||||
def dump(self):
|
||||
"""
|
||||
Dump packet to msgpack encoded binary data
|
||||
"""
|
||||
res = {
|
||||
"packetsID": self.packetsID,
|
||||
"packetNumber": self.packetNumber,
|
||||
|
@ -50,5 +76,5 @@ class Packet:
|
|||
if res["data"] == "":
|
||||
res.pop("data")
|
||||
ores = msgpack.dumps(res)
|
||||
logging.log(20, "Packet size: " + str(sys.getsizeof(ores)))
|
||||
# logging.log(20, "Packet size: " + str(sys.getsizeof(ores)))
|
||||
return ores
|
||||
|
|
|
@ -1,91 +0,0 @@
|
|||
import Packets.Packet as p
|
||||
import Packets.HeaderPacket as h
|
||||
import lzma
|
||||
import msgpack
|
||||
import random
|
||||
import sys
|
||||
import math
|
||||
|
||||
# Reassemble method
|
||||
|
||||
# Test
|
||||
# Polymorph to accept payload array, done
|
||||
# Create packet instance with payload
|
||||
# Add to and from node ids
|
||||
# Node id generation, random, checked against existing ids
|
||||
|
||||
# DO NOT CHANGE DATA SIZE UNLESS YOU KNOW WHAT YOU'RE DOING
|
||||
|
||||
|
||||
class Packets:
|
||||
def __init__(
|
||||
self,
|
||||
bytesObject,
|
||||
sender,
|
||||
senderDisplayName,
|
||||
recipient,
|
||||
recipientNode,
|
||||
dataSize=128,
|
||||
wantFullResponse=False,
|
||||
packetsClass=None,
|
||||
):
|
||||
if isinstance(bytesObject, list):
|
||||
# TODO: instantiating HeaderPacket correctly
|
||||
packets = [h.Header(bytesObject[0])]
|
||||
for packet in bytesObject:
|
||||
packets.append(
|
||||
p.Packet(
|
||||
packet["data"],
|
||||
packetsID=packet["packetsID"],
|
||||
packetNumber=packet["packetNumber"],
|
||||
packetsClass=packetsClass,
|
||||
)
|
||||
)
|
||||
self.packets = packets
|
||||
else:
|
||||
bytesObject = lzma.compress(bytesObject)
|
||||
packets = []
|
||||
self.packetsID = random.randrange(0, 999999)
|
||||
pnum = 1
|
||||
blen = math.ceil(len(bytesObject) / dataSize)
|
||||
tb = b""
|
||||
for it in range(blen):
|
||||
if it >= (blen - 1):
|
||||
b = bytesObject[it * dataSize :]
|
||||
else:
|
||||
b = bytesObject[it * dataSize : (it * dataSize + dataSize)]
|
||||
packets.append(
|
||||
p.Packet(b, self.packetsID, pnum, packetsClass=packetsClass)
|
||||
)
|
||||
pnum += 1
|
||||
tb += b
|
||||
packets.insert(
|
||||
0,
|
||||
h.Header(
|
||||
self.packetsID,
|
||||
pnum,
|
||||
sender,
|
||||
senderDisplayName,
|
||||
recipient,
|
||||
recipientNode,
|
||||
wantFullResponse=wantFullResponse,
|
||||
packetsClass=packetsClass,
|
||||
),
|
||||
)
|
||||
for it in range(pnum):
|
||||
packet = msgpack.loads(packets[it].dump())
|
||||
packet["packetCount"] = pnum
|
||||
|
||||
packets[it] = msgpack.dumps(packet)
|
||||
|
||||
self.packets = packets
|
||||
|
||||
def get(self):
|
||||
return self.packets
|
||||
|
||||
def reassemble(self, cm):
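        """
        Rebuild the original message: concatenate data packets in packetNumber order,
        lzma-decompress the result and msgpack-load it back into an object
        """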
|
||||
data = b""
|
||||
for it in range(1, int(cm["packetCount"])):
|
||||
data += cm["data"][cm["dataOrder"].index(it)]
|
||||
res = msgpack.loads(lzma.decompress(data))
|
||||
return res
|
|
@ -1,6 +0,0 @@
|
|||
Packets representation for full
|
||||
message
|
||||
===============================
|
||||
|
||||
.. autoclass:: Packets.Packets.Packets
|
||||
:members:

@ -1,11 +1,18 @@
from .Packet import Packet
import msgpack, lzma
import msgpack
import lzma

# TODO: Instantiation
# TODO: Packet template loading


class SinglePacket(Packet):
    """
    WIP

    Single packet for very small protocols
    """

    def __init__(self, data, packetsID, packetsClass=None, cache=None):
        super().__init__(
            lzma.compress(msgpack.dumps(data)),

@ -1,4 +1,7 @@
async def filter(completeMessage, recipient, recipientNode, onodeID, todo):
    """
    Peer to peer protocol
    """
    m = completeMessage
    if recipientNode == onodeID:
        todo.append(
@ -1,4 +1,7 @@
async def filter(completeMessage, recipient, recipientNode, todo):
    """
    Catch exchange protocol
    """
    m = completeMessage
    # TODO: Sending to other nodes clients
    todo.append(
@ -2,6 +2,9 @@ import logging


async def filter(completeMessage, recipientNode, todo):
    """
    Cryptographic operations protocol
    """
    todo.append(
        {
            "action": "keyDeriveDH",

@ -12,4 +15,4 @@ async def filter(completeMessage, recipientNode, todo):
            },
        }
    )
    logging.log(10, "Adding cryptography request")
    # logging.log(10, "Adding cryptography request")
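
These protocol handlers never act on a message directly; they append an action dictionary (such as the `keyDeriveDH` request above) to the shared `todo` list, which the running Node drains later. A minimal sketch of that producer/consumer pattern; the consumer loop and the fields under `data` are illustrative assumptions, not the project's actual dispatcher:

```python
import asyncio

todo = []

async def filter(completeMessage, recipientNode, todo):
    # mirrors the handler above: queue an action instead of acting directly
    todo.append(
        {
            "action": "keyDeriveDH",
            # illustrative fields only
            "data": {"recipientNode": recipientNode, "msg": completeMessage},
        }
    )

async def keyDeriveDH(data):
    print("would derive a shared key with node", data["recipientNode"])

async def drain(todo, handlers):
    # simplified stand-in for the Node loop that consumes the todo list
    while todo:
        action = todo.pop(0)
        await handlers[action["action"]](action["data"])

asyncio.run(filter({"sender": "000000"}, 123456, todo))
asyncio.run(drain(todo, {"keyDeriveDH": keyDeriveDH}))
```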

@ -1,7 +1,7 @@
import logging


async def filter(completeMessage, todo):
    """
    Network mapping protocol
    """
    m = completeMessage
    todo.append(
        {
@ -1,7 +1,7 @@
import msgpack
import traceback

from Packets.Packets import Packets
from Packets.Message import Message
import Sponge.Protocols.bubble
import Sponge.Protocols.map
import Sponge.Protocols.catch

@ -9,6 +9,25 @@ import Sponge.Protocols.cryptography


class Filter:
    """
    Packet filtering orchestration

    cLog
        Reference to `run.Node.cLog` for logging

    cache: Daisy.Cache.Cache
        Reference to our Daisy Cache instance

    completed: list
        List of completed messages IDs

    todo
        Reference to list of actions to do in the Node

    onodeID
        PierMesh node ID
    """

    def __init__(self, cache, onodeID, todo, cLog):
        self.cLog = cLog
        self.cache = cache

@ -20,7 +39,10 @@ class Filter:
        self.todo = todo
        self.onodeID = onodeID

    def mCheck(self, payload):
    def mCheck(self, payload: bytes):
        """
        Check if payload bytes are msgpack encoded, otherwise skip
        """
        try:
            msgpack.loads(payload)
            return True

@ -29,21 +51,30 @@ class Filter:
            return False

    def selfCheck(self, packet):
        """
        Check if this is a self packet, if so skip
        """
        if packet["fromId"] == packet["toId"]:
            self.cLog(20, "Self packet, ignored")
            return False
        else:
            return True

    async def protoMap(self, protocolID):
    async def protoMap(self, protocolID: int):
        """
        Get protocol from protocol ID using the mlookup table
        """
        protocolID = str(protocolID).zfill(6)
        return self.cache.get("mlookup").get()[protocolID]

    async def protoRoute(self, completeMessage):
    async def protoRoute(self, completeMessage: dict):
        """
        Shorthand reference
        Route message to proper protocol handler
        """
        m = completeMessage
        """
        Shorthand reference completeMessage for ease
        """
        sender = m["sender"]
        senderDisplay = m["senderDisplayName"]
        recipient = m["recipient"]

@ -67,6 +98,9 @@ class Filter:
            self.cLog(30, "Cant route, no protocol")

    async def sieve(self, packet):
        """
        Base filtering logic, takes a single MeshTastic packet
        """
        p = packet["decoded"]["payload"]
        if self.selfCheck(packet) and self.mCheck(p):
            try:

@ -103,13 +137,11 @@ class Filter:
                # TO DO: implement loop
                # responseLoop(packets_id)
                pass
                # TODO: Sorting
                completeMessage = self.messages[packetsID]
                completeMessage["data"] = Packets.reassemble(None, completeMessage)
                del self.messages[packetsID]
                self.completed.append(packetsID)
                self.cLog(20, "Assembly completed, routing")
                # self.cache.create("messages/" + str(packetsID), cm)
                await self.protoRoute(completeMessage)
            except Exception as e:
                self.cLog(30, traceback.print_exc())
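
`protoMap()` above zero-pads the numeric protocol ID to six digits and uses it as a key into the cached `mlookup` record. A sketch of that lookup with a hypothetical table; the ID-to-protocol assignments shown here are an assumption, not taken from the repository:

```python
# Sketch of the zero-padded protocol lookup used by Filter.protoMap above.
# Table contents are illustrative: only the fact that keys are six-digit,
# zero-padded strings comes from the code.
mlookup = {
    "000000": "map",
    "000001": "bubble",
    "000002": "catch",
    "000003": "cryptography",
}

def protoMap(protocolID: int) -> str:
    return mlookup[str(protocolID).zfill(6)]

assert protoMap(3) == "cryptography"
```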

@ -1,223 +0,0 @@
from sys import getsizeof
import meshtastic
import meshtastic.serial_interface
from pubsub import pub
from Packets.Packets import Packets as Packets
from Packets.SinglePacket import SinglePacket
import time
from threading import Thread
from Components.daisy import Catch, Cache
import sys
import logging

# from Filters.base import Filter
import msgpack

import asyncio
import random


class Transmitter:
    def __init__(self, device, filter, onodeID, cache, catch, cryptographyInfo, cLog):
        self.cLog = cLog
        self.cryptographyInfo = cryptographyInfo
        self.filter = filter
        self.tcache = cache
        self.tcatch = catch
        self.html = False
        self.notConnected = True
        self.messages = {}
        self.acks = {}
        # self.threads = {}
        self.onodeID = onodeID
        # Be careful with this
        self.cpid = 0
        self.tasks = {}
        # TODO: use node id to deliver directly
        pub.subscribe(self.onReceive, "meshtastic.receive")
        pub.subscribe(self.onConnection, "meshtastic.connection.established")
        self.interface = meshtastic.serial_interface.SerialInterface(device)
        i = 0
        while self.notConnected:
            if i % 5000000 == 0:
                self.cLog(20, "Waiting for node initialization...")
            i += 1
        self.cLog(20, "Initialized")

    # TODO: Sending packets across multiple nodes/load balancing/distributed packet transmission/reception
    def onReceive(self, packet, interface):
        asyncio.new_event_loop().run_until_complete(self.filter.sieve(packet))
        self.tcache.refresh()

    async def sendAnnounce(self):
        await self.addPackets(
            msgpack.dumps(
                {
                    "onodeID": self.onodeID,
                    "mnodeID": self.interface.localNode.nodeNum,
                }
            ),
            self.onodeID,
            None,
            True,
            None,
            packetsClass=0,
        )

    def onConnection(self, interface, topic=pub.AUTO_TOPIC):
        # self.send("connect".encode("utf-8"))
        # time.sleep(3)
        asyncio.run(self.sendAnnounce())
        self.notConnected = False

    def responseCheck(self, packet):
        rid = packet["decoded"]["requestId"]
        if packet["decoded"]["routing"]["errorReason"] == "MAX_RETRANSMIT":
            self.cLog(20, "Got ack error")
            self.acks[str(rid)] = False
        else:
            self.acks[str(rid)] = True

    # TODO: Threaded send method

    def send(self, packet, recipientNode=False):
        interface = self.interface
        if recipientNode == False:
            pid = interface.sendData(
                packet, wantAck=True, onResponse=self.responseCheck
            )
        else:
            pid = interface.sendData(
                packet,
                destinationId=recipientNode,
                wantAck=True,
                onResponse=self.responseCheck,
            )

        # Can I use waitForAckNak on cpid?
        self.cpid = pid.id
        return True

    async def awaitResponse(self, pid):
        for i in range(120):
            await asyncio.sleep(1)
            if str(pid) in self.acks:
                break
        return True

    async def initNodeDH(self, dhefOb, recipientNode, onodeID):
        await self.addPackets(
            msgpack.dumps(
                {"params": dhefOb.getParamsBytes(), "publicKey": dhefOb.publicKey}
            ),
            self.onodeID,
            000000,
            000000,
            onodeID,
            directID=recipientNode,
            packetsClass=3,
        )

    def awaitFullResponse(self, pid):
        for i in range(1_000_000_000):
            time.sleep(5)
            if pid in self.messages.keys():
                if self.messages[pid]["finished"]:
                    break
        return True

    async def addPackets(
        self,
        data,
        sender,
        senderName,
        recipient,
        recipientNode,
        directID=False,
        packetsClass=None,
        encrypt=False,
    ):
        interface = self.interface
        tp = Packets(
            data,
            sender,
            senderName,
            recipient,
            recipientNode,
            packetsClass=packetsClass,
        )
        # print(sys.getsizeof(tp.packets[0]))
        # print(tp.packets[0])
        for p in tp.packets:
            # time.sleep(5)
            if recipientNode == None:
                # print("sending none")
                # print(p)
                self.send(p)
            else:
                # print(p)
                # print(recipientNode)
                self.cLog(10, "Sending target: " + str(directID))
                if directID != False:
                    recipientNode = directID
                self.send(p, recipientNode=recipientNode)
            awaitTask = asyncio.create_task(self.awaitResponse(self.cpid))
            await asyncio.sleep(1)
            currentTask = {
                "ob": awaitTask,
                "pid": str(self.cpid),
                "packet": p,
                "retry": False,
            }
            self.tasks[str(self.cpid)] = currentTask

    async def progressCheck(self):
        # interface = self.interface
        while True:
            await asyncio.sleep(90)
            self.cLog(
                20, "Checking progress of {0} tasks".format(len(self.tasks.keys()))
            )
            doneFlag = True
            dcTasks = [k for k in self.tasks.keys()]
            for task in dcTasks:
                task = self.tasks[task]
                if task["ob"]:
                    if task["pid"] in self.acks:
                        if not self.acks[task["pid"]]:
                            retry = task["retry"]
                            remove = False
                            if retry == False:
                                retry = 1
                            elif retry < 3:
                                retry += 1
                            else:
                                self.cLog(30, "Too many retries")
                                remove = True
                            if remove:
                                del self.tasks[task["pid"]]
                            else:
                                self.cLog(20, "Doing retry")
                                doneFlag = False
                                # TODO: Resend to specific node
                                self.send(task["packet"])
                                await_thread = asyncio.create_task(
                                    self.awaitResponse(task["pid"])
                                )

                                await asyncio.sleep(1)
                                currentTask = {
                                    "ob": await_thread,
                                    "pid": str(self.cpid),
                                    "packet": task["packet"],
                                }
                                currentTask["retry"] = retry
                                self.tasks[task["pid"]] = currentTask
                        else:
                            del self.tasks[task["pid"]]

    async def announce(self):
        while True:
            self.cLog(10, "Announce")
            await self.sendAnnounce()
            await asyncio.sleep(180)
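
`send()` registers `responseCheck` as the Meshtastic `onResponse` callback, and `awaitResponse()` simply polls the shared `acks` dictionary until the request ID appears or roughly two minutes pass. A standalone sketch of that poll-for-ack pattern, with a fake responder standing in for the radio callback:

```python
import asyncio

acks = {}

async def awaitResponse(pid, timeout=120):
    # mirrors Transmitter.awaitResponse: poll once per second for the ack entry
    for _ in range(timeout):
        await asyncio.sleep(1)
        if str(pid) in acks:
            return acks[str(pid)]
    return False

async def fakeResponder(pid, ok=True):
    await asyncio.sleep(2)      # radio round trip stand-in
    acks[str(pid)] = ok         # what responseCheck() would record

async def main():
    pid = 7
    asyncio.create_task(fakeResponder(pid))
    print("acked:", await awaitResponse(pid))

asyncio.run(main())
```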

@ -1,5 +0,0 @@
Layer 0 data transmission
===============================

.. autoclass:: Transmission.transmission.Transmitter
    :members:

@ -19,7 +19,11 @@ release = "Proto"
# -- General configuration ---------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration

extensions = ["sphinx_markdown_builder", "sphinx.ext.autodoc", "sphinx.ext.napoleon"]
extensions = [
    "sphinx_markdown_builder",
    "sphinx.ext.autodoc",
    "sphinx.ext.napoleon",
]

templates_path = ["_templates"]
exclude_patterns = []
@ -1,10 +0,0 @@
<!-- PierMesh documentation master file, created by
sphinx-quickstart on Fri Jul 26 23:30:55 2024.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive. -->

# PierMesh documentation

Add your content using `reStructuredText` syntax. See the
[reStructuredText](https://www.sphinx-doc.org/en/master/usage/restructuredtext/index.html)
documentation for details.

@ -12,8 +12,9 @@ PierMesh documentation
   ui
   ../Siph/*
   ../Components/*
   ../Daisy/*
   ../Cryptography/*
   ../Sponge/*
   ../Packets/*
   ../Transmission/*
   ../webui/*
   ../Transceiver/*
   ../Splash/*
src/run.py

@ -1,11 +1,11 @@
# PierMesh libraries
from Sponge.base import Filter
from Siph.map import Network
from Components.daisy import Catch
from Components.daisy import Cache
from webui.serve import Server
from Transmission.transmission import Transmitter
from Cryptography.DHEFern import DHEFern
from Daisy.Catch import Catch
from Daisy.Cache import Cache
from Splash.serve import Server
from Transceiver.Transceiver import Transceiver
from Cryptography.WhaleSong import DHEFern
from ui import TUI

# Generic imports

@ -27,9 +27,8 @@ if __name__ == "__main__":
    """
    Global objects for the PierMesh service and the TUI so we can terminate the associated processes later
    """
    tuiOb = None
    nodeOb = None

    tuiOb = None
    # Pull startup parameters
    device, webPort, serverInfoFile, delay, nodeNickname = sys.argv[1:]
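
The five positional arguments unpacked above are the whole startup interface of the service. A purely illustrative invocation; the device path, port, and file names are placeholders, not values from the repository:

```python
# Hypothetical launch of the service with the five positional arguments
# expected by src/run.py; every value below is a placeholder.
import subprocess

subprocess.run([
    "python", "src/run.py",
    "/dev/ttyUSB0",   # device: serial device of the Meshtastic radio
    "8000",           # webPort: port the Splash web server listens on
    "server1.info",   # serverInfoFile: record holding this node's persisted ID
    "0",              # delay: startup stagger in seconds
    "node1",          # nodeNickname: name handed to the DHEFern cryptography helper
])
```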


@ -46,7 +45,7 @@ if __name__ == "__main__":

class Node:
    """
    Node: Class that handles most of the PierMesh data
    Class that handles most of the PierMesh data

    `🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/src/run.py>`_


@ -76,8 +75,8 @@ class Node:
    onodeID: str
        PierMesh node ID

    oTransmitter: Transmitter
        LoRa transmitter `Transmitter`
    oTransceiver: Transceiver
        LoRa transceiver `Transceiver`

    processed: list
        List of IDs of already completed messages so that we don't reprocess messages

@ -109,14 +108,14 @@ class Node:
        if self.nodeInfo == False:
            self.cache.create(serverInfoFile, {"nodeID": random.randrange(0, 1000000)})
            self.nodeInfo = self.cache.get(serverInfoFile)
        self.network.addin(self.serverInfo.get()["nodeID"])
        self.network.addin(self.nodeInfo.get()["nodeID"])
        self.cLog(20, "Siph network stack initialized")
        self.onodeID = str(self.nodeInfo.get()["nodeID"])
        self.server = None
        self.sponge = Filter(self.cache, self.onodeID, self.todo, self.cLog)
        self.cLog(20, "Filter initialized")
        self.cLog(10, "Command line arguments: " + ", ".join(sys.argv))
        self.oTransmitter = None
        self.oTransceiver = None
        self.cLog(20, "Cryptography initializing")
        self.cryptographyInfo = DHEFern(self.cache, nodeNickname, self.cLog)
        self.cLog(20, "Cryptography initialized")

@ -145,6 +144,7 @@ class Node:
        self.toLog.append("[{0}]:\n{1}".format(datetime.datetime.now(), message))

    async def monitor(self):
        global tuiOb
        """
        Monitor and log ram and cpu usage
        """

@ -153,20 +153,22 @@ class Node:
            if tuiOb.done:
                print("Terminating PierMesh service...")
                self.proc.terminate()
            await asyncio.sleep(10)
            memmb = self.proc.memory_info().rss / (1024 * 1024)
            memmb = round(memmb, 2)
            cpup = self.proc.cpu_percent(interval=1)
            self.cLog(
                20,
                " MEM: {0} mB | CPU: {1} %".format(
                    memmb,
                    cpup,
                ),
            )
            # Set cpu and memory usage in the TUI
            tuiOb.do_set_cpu_percent(float(cpup))
            tuiOb.do_set_mem(memmb)
            await asyncio.sleep(10)
            memmb = self.proc.memory_info().rss / (1024 * 1024)
            memmb = round(memmb, 2)
            cpup = self.proc.cpu_percent(interval=1)
            self.cLog(
                20,
                " MEM: {0} mB | CPU: {1} %".format(
                    memmb,
                    cpup,
                ),
            )
            tuiOb.do_set_cpu_percent(float(cpup))
            tuiOb.do_set_mem(memmb)
            else:
                self.cLog(20, "No TUI object, waiting 5 seconds...")
                await asyncio.sleep(5)

    async def spongeListen(self):
        """

@ -174,7 +176,7 @@ class Node:

        See Also
        --------
        Filters.base.sieve: Packet filtering/parsing
        Sponge.base.sieve: Packet filtering/parsing

        Notes
        -----

@ -200,7 +202,7 @@ class Node:

        See Also
        --------
        Filters.Protocols: Protocol based packet filtering
        Sponge.Protocols: Protocol based packet filtering

        webui.serve.Server: Runs a light Microdot web server with http/s and websocket functionality


@ -211,10 +213,6 @@ class Node:
    async def action_sendCatch(self, data: dict):
        """
        Get catch and return the data to a peer

        See Also
        --------
        Bubble.router.Router: Routing class
        """
        res = self.catch.get(data["head"], data["body"], fins=data["fins"])
        self.server.sendToPeer(data["recipient"], res)

@ -240,7 +238,7 @@ class Node:
        Cryptography.DHEFern.DHEFern: End to end encryption functionality
        """
        if self.cryptographyInfo.getRecord("key", data["onodeID"]) == False:
            await self.oTransmitter.initNodeDH(
            await self.oTransceiver.initNodeDH(
                self.cryptographyInfo, int(data["mnodeID"]), data["onodeID"]
            )


@ -292,7 +290,7 @@ async def main():
        nodeOb.cLog(20, "Starting up")
        nodeOb.cLog(20, "Staggering {0} seconds, please wait".format(sys.argv[4]))
        time.sleep(int(sys.argv[4]))
        nodeOb.oTransmitter = Transmitter(
        nodeOb.oTransceiver = Transceiver(
            sys.argv[1],
            nodeOb.sponge,
            nodeOb.onodeID,

@ -302,7 +300,7 @@ async def main():
            nodeOb.cLog,
        )
        nodeOb.server = Server(
            nodeOb.oTransmitter,
            nodeOb.oTransceiver,
            nodeOb.catch,
            nodeOb.onodeID,
            nodeOb.network,

@ -310,15 +308,17 @@ async def main():
        )
        nodeOb.mTasks["list"] = asyncio.create_task(nodeOb.spongeListen())
        await asyncio.sleep(1)
        nodeOb.mTasks["pct"] = asyncio.create_task(nodeOb.oTransmitter.progressCheck())
        nodeOb.mTasks["pct"] = asyncio.create_task(nodeOb.oTransceiver.progressCheck())
        await asyncio.sleep(1)
        nodeOb.mTasks["mon"] = asyncio.create_task(nodeOb.monitor())
        await asyncio.sleep(1)
        nodeOb.mTasks["announce"] = asyncio.create_task(nodeOb.oTransmitter.announce())
        nodeOb.mTasks["announce"] = asyncio.create_task(nodeOb.oTransceiver.announce())
        await asyncio.sleep(1)
        await nodeOb.server.app.start_server(port=int(sys.argv[2]), debug=True)
    except KeyboardInterrupt:
        sys.exit()
    except Exception:
        nodeOb.cLog(20, traceback.format_exc())


if __name__ == "__main__":

@ -1,6 +1,5 @@
PierMesh service runner
=======================
run: PierMesh service runner
============================

.. automodule:: run.main
.. autoclass:: run.Node
    :members:
    :members:


@ -1,4 +1,4 @@
TUI application
ui: TUI application
==========================

.. autoclass:: ui.TUI

@ -1,40 +0,0 @@
from jinja2 import Environment, FileSystemLoader, select_autoescape
import os, markdown2
import json, msgpack, subprocess
import shutil
from distutils.dir_util import copy_tree

env = Environment(loader=FileSystemLoader("templates"))

# subprocess.check_call("mmdc -i * -e png")

# TODO: Generating mmd from docstrings

for path in os.listdir("diagrams/markdown"):
    fname = path.split(".")[0]
    try:
        subprocess.check_call(
            "mmdc -i diagrams/markdown/{0} -o res/img/diagrams/{1}.png".format(
                path, fname
            ),
            shell=True,
        )
    except Exception as e:
        print("Empty file or other error")


copy_tree("diagrams/markdown", "res/diagrams")
copy_tree("res", "build/res")
shutil.copyfile("htmx-extensions/src/ws/ws.js", "build/res/js/ws.js")

tpath = "templates/"

for path in os.listdir(tpath):
    if ("base" in path) != True:
        for t in os.listdir(tpath + path):
            if os.path.exists("build/" + path) != True:
                os.makedirs("build/" + path)
            ipath = tpath + path + "/" + t
            template = env.get_template(path + "/" + t)
            with open("build/{0}/{1}".format(path, t), "w") as f:
                f.write(template.render())
|
|
@ -1,13 +0,0 @@
|
|||
---
|
||||
title: "🔵 Bubble"
|
||||
---
|
||||
erDiagram
|
||||
"👥 Peer" |{..o| "🗄️ Server" : "🔌 WS"
|
||||
"👥 Peer" |{..o| "🗄️ Server": "📄 HTTP/S"
|
||||
"🗄️ Server" |o..o| "📤 Transmitter": "❔ Queries"
|
||||
"📤 Transmitter" |o..o| "📻 PierMesh": "📻 send"
|
||||
"📤 Transmitter" |o..o| "📻 PierMesh": "📻 onReceive"
|
||||
"📤 Transmitter" |o..o| "🧽 Sieve": "📻 onReceive"
|
||||
"🧽 Sieve" |o..o| "💿 Cache": "➕ Write"
|
||||
"💿 Cache" |o..o| "👂 fListen": "➕ Write event"
|
||||
"👂 fListen" |o..o| "🗄️ Server": "✉️ Pass message"
|
|
@ -1,16 +0,0 @@
|
|||
---
|
||||
title: "🐟 Catch"
|
||||
---
|
||||
erDiagram
|
||||
"👥 Peer" |{..o| "🗄️ Server": "🔌 WS"
|
||||
"👥 Peer" |{..o| "🗄️ Server": "📄 HTTP/S"
|
||||
"🗄️ Server" |o..o| "🐟 Catch": "❔ Queries"
|
||||
"📻 PierMesh" |o..o| "🧽 Sieve": "🧽 Filters"
|
||||
"🧽 Sieve" |o..o| "👂 fListen": "👂 Listens for messages"
|
||||
"👂 fListen" |o..o| "🐟 Catch": "❔ Queries"
|
||||
"🐟 Catch" |o..}| "🌼 Daisy": "📄 Store references"
|
||||
"🌼 Daisy" {
|
||||
string filepath
|
||||
dictionary msg
|
||||
}
|
||||
"🌼 Daisy" |o..o| "📁 File system": "📁 CRUD"
|
|
@ -1,5 +0,0 @@
|
|||
---
|
||||
title: "🌼 Daisy"
|
||||
---
|
||||
erDiagram
|
||||
|
|
@ -1,20 +0,0 @@
|
|||
---
|
||||
title: "📻 PierMesh"
|
||||
---
|
||||
erDiagram
|
||||
"👥 Peer" }|..|{ "🗄️Server" : "🔌 WS"
|
||||
"👥 Peer" }|..|{ "🗄️Server": "📄 HTTP/S"
|
||||
"🗄️Server" |o..o| "🐟 Catch": "❔ Queries"
|
||||
"🗄️Server" |o..o| "💿 Cache": "❔ Queries"
|
||||
"🗄️Server" |o..o| "📤 Transmitter": "❔ Queries"
|
||||
"🐟 Catch" |o..o| "📤 Transmitter": "❔ Queries"
|
||||
"🐟 Catch" |o..o| "👥 Peer": "🔌 WS"
|
||||
"🐟 Catch" |o..o| "📤 Transmitter": "✉️ Sync packet"
|
||||
"💿 Cache" |o..o| "📤 Transmitter": "✉️ Sync packet"
|
||||
"👂 fListen" |o..o| "💿 Cache": "👂 Listen for completed messages"
|
||||
"👂 fListen" |o..o| "👥 Peer": "🔌 WS"
|
||||
"📤 Transmitter" |o..o| "🔽 onReceive": "✉️ Get packet"
|
||||
"🔽 onReceive" |o..o| "🧽 Sieve": "🧽 Filter packet"
|
||||
"🧽 Sieve" |o..o| "💿 Cache": "➕ Push completed messages"
|
||||
"📤 Transmitter" |o..o| "📻 PierMesh": "📻 Send"
|
||||
"📻 PierMesh" |o..o| "🔽 onReceive": "🔽 Receive"
|
|
@ -1,5 +0,0 @@
|
|||
---
|
||||
title: "📤 Transmitter"
|
||||
---
|
||||
erDiagram
|
||||
|
Before Width: | Height: | Size: 89 KiB |
|
@ -1 +0,0 @@
|
|||
mmdc -i res/misc/dia.mmd -o res/misc/dia.png
|
|
@ -1,10 +0,0 @@
|
|||
@font-face {
|
||||
font-family: 'Ubuntu Nerd Font';
|
||||
src: url('/res/fonts/UbuntuNF.eot');
|
||||
src: url('/res/fonts/UbuntuNF.eot?#iefix') format('embedded-opentype'),
|
||||
url('/res/fonts/UbuntuNF.woff2') format('woff2'),
|
||||
url('/res/fonts/UbuntuNF.woff') format('woff');
|
||||
font-weight: normal;
|
||||
font-style: normal;
|
||||
font-display: swap;
|
||||
}
|
|
@ -1,37 +0,0 @@
|
|||
:root {
|
||||
--palette-text-white: #FFFFFF;
|
||||
--palette-text-black: #000000;
|
||||
--palette-text-three: #3A424D;
|
||||
--palette-text-four: #5B8080;
|
||||
|
||||
--palette-one: #3A4D24;
|
||||
--palette-two: #A6B08E;
|
||||
--palette-three: #879B77;
|
||||
--palette-four: #61805B;
|
||||
}
|
||||
|
||||
html {
|
||||
background-color: var(--palette-one);
|
||||
color: var(--palette-text-white);
|
||||
font-family: 'Ubuntu Nerd Font';
|
||||
padding: 10px;
|
||||
}
|
||||
|
||||
.plank {
|
||||
padding: 10px;
|
||||
background-color: var(--palette-two);
|
||||
}
|
||||
|
||||
ul {
|
||||
padding: 0;
|
||||
list-style-type: none !important;
|
||||
}
|
||||
|
||||
li {
|
||||
padding-top: 5px;
|
||||
}
|
||||
|
||||
input[type=text],
|
||||
input[type=number] {
|
||||
min-width: 150px;
|
||||
}
|
|
@ -1,13 +0,0 @@
|
|||
---
|
||||
title: "🔵 Bubble"
|
||||
---
|
||||
erDiagram
|
||||
"👥 Peer" |{..o| "🗄️ Server" : "🔌 WS"
|
||||
"👥 Peer" |{..o| "🗄️ Server": "📄 HTTP/S"
|
||||
"🗄️ Server" |o..o| "📤 Transmitter": "❔ Queries"
|
||||
"📤 Transmitter" |o..o| "📻 PierMesh": "📻 send"
|
||||
"📤 Transmitter" |o..o| "📻 PierMesh": "📻 onReceive"
|
||||
"📤 Transmitter" |o..o| "🧽 Sieve": "📻 onReceive"
|
||||
"🧽 Sieve" |o..o| "💿 Cache": "➕ Write"
|
||||
"💿 Cache" |o..o| "👂 fListen": "➕ Write event"
|
||||
"👂 fListen" |o..o| "🗄️ Server": "✉️ Pass message"
|
|
@ -1,16 +0,0 @@
|
|||
---
|
||||
title: "🐟 Catch"
|
||||
---
|
||||
erDiagram
|
||||
"👥 Peer" |{..o| "🗄️ Server": "🔌 WS"
|
||||
"👥 Peer" |{..o| "🗄️ Server": "📄 HTTP/S"
|
||||
"🗄️ Server" |o..o| "🐟 Catch": "❔ Queries"
|
||||
"📻 PierMesh" |o..o| "🧽 Sieve": "🧽 Filters"
|
||||
"🧽 Sieve" |o..o| "👂 fListen": "👂 Listens for messages"
|
||||
"👂 fListen" |o..o| "🐟 Catch": "❔ Queries"
|
||||
"🐟 Catch" |o..}| "🌼 Daisy": "📄 Store references"
|
||||
"🌼 Daisy" {
|
||||
string filepath
|
||||
dictionary msg
|
||||
}
|
||||
"🌼 Daisy" |o..o| "📁 File system": "📁 CRUD"
|
|
@ -1,5 +0,0 @@
|
|||
---
|
||||
title: "🌼 Daisy"
|
||||
---
|
||||
erDiagram
|
||||
|
|
@ -1,20 +0,0 @@
|
|||
---
|
||||
title: "📻 PierMesh"
|
||||
---
|
||||
erDiagram
|
||||
"👥 Peer" }|..|{ "🗄️Server" : "🔌 WS"
|
||||
"👥 Peer" }|..|{ "🗄️Server": "📄 HTTP/S"
|
||||
"🗄️Server" |o..o| "🐟 Catch": "❔ Queries"
|
||||
"🗄️Server" |o..o| "💿 Cache": "❔ Queries"
|
||||
"🗄️Server" |o..o| "📤 Transmitter": "❔ Queries"
|
||||
"🐟 Catch" |o..o| "📤 Transmitter": "❔ Queries"
|
||||
"🐟 Catch" |o..o| "👥 Peer": "🔌 WS"
|
||||
"🐟 Catch" |o..o| "📤 Transmitter": "✉️ Sync packet"
|
||||
"💿 Cache" |o..o| "📤 Transmitter": "✉️ Sync packet"
|
||||
"👂 fListen" |o..o| "💿 Cache": "👂 Listen for completed messages"
|
||||
"👂 fListen" |o..o| "👥 Peer": "🔌 WS"
|
||||
"📤 Transmitter" |o..o| "🔽 onReceive": "✉️ Get packet"
|
||||
"🔽 onReceive" |o..o| "🧽 Sieve": "🧽 Filter packet"
|
||||
"🧽 Sieve" |o..o| "💿 Cache": "➕ Push completed messages"
|
||||
"📤 Transmitter" |o..o| "📻 PierMesh": "📻 Send"
|
||||
"📻 PierMesh" |o..o| "🔽 onReceive": "🔽 Receive"
|
|
@ -1,5 +0,0 @@
|
|||
---
|
||||
title: "📤 Transmitter"
|
||||
---
|
||||
erDiagram
|
||||
|
Before Width: | Height: | Size: 606 B |
Before Width: | Height: | Size: 662 B |
Before Width: | Height: | Size: 41 KiB |
Before Width: | Height: | Size: 38 KiB |
Before Width: | Height: | Size: 2.2 KiB |
Before Width: | Height: | Size: 89 KiB |
Before Width: | Height: | Size: 2.5 KiB |
Before Width: | Height: | Size: 2.3 KiB |
|
@ -1 +0,0 @@
|
|||
npx vite build
|
|
@ -1,815 +0,0 @@
|
|||
{
|
||||
"name": "piermesh",
|
||||
"version": "1.0.0",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "piermesh",
|
||||
"version": "1.0.0",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"htmx.org": "2.0.0",
|
||||
"three": "^0.166.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"vite": "^5.3.4"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/aix-ppc64": {
|
||||
"version": "0.21.5",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz",
|
||||
"integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==",
|
||||
"cpu": [
|
||||
"ppc64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"aix"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/android-arm": {
|
||||
"version": "0.21.5",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz",
|
||||
"integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==",
|
||||
"cpu": [
|
||||
"arm"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"android"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/android-arm64": {
|
||||
"version": "0.21.5",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz",
|
||||
"integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"android"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/android-x64": {
|
||||
"version": "0.21.5",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz",
|
||||
"integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"android"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/darwin-arm64": {
|
||||
"version": "0.21.5",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz",
|
||||
"integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/darwin-x64": {
|
||||
"version": "0.21.5",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz",
|
||||
"integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/freebsd-arm64": {
|
||||
"version": "0.21.5",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz",
|
||||
"integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"freebsd"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/freebsd-x64": {
|
||||
"version": "0.21.5",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz",
|
||||
"integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"freebsd"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/linux-arm": {
|
||||
"version": "0.21.5",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz",
|
||||
"integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==",
|
||||
"cpu": [
|
||||
"arm"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/linux-arm64": {
|
||||
"version": "0.21.5",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz",
|
||||
"integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/linux-ia32": {
|
||||
"version": "0.21.5",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz",
|
||||
"integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==",
|
||||
"cpu": [
|
||||
"ia32"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/linux-loong64": {
|
||||
"version": "0.21.5",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz",
|
||||
"integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==",
|
||||
"cpu": [
|
||||
"loong64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/linux-mips64el": {
|
||||
"version": "0.21.5",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz",
|
||||
"integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==",
|
||||
"cpu": [
|
||||
"mips64el"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/linux-ppc64": {
|
||||
"version": "0.21.5",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz",
|
||||
"integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==",
|
||||
"cpu": [
|
||||
"ppc64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/linux-riscv64": {
|
||||
"version": "0.21.5",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz",
|
||||
"integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==",
|
||||
"cpu": [
|
||||
"riscv64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/linux-s390x": {
|
||||
"version": "0.21.5",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz",
|
||||
"integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==",
|
||||
"cpu": [
|
||||
"s390x"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/linux-x64": {
|
||||
"version": "0.21.5",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz",
|
||||
"integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/netbsd-x64": {
|
||||
"version": "0.21.5",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz",
|
||||
"integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"netbsd"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/openbsd-x64": {
|
||||
"version": "0.21.5",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz",
|
||||
"integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"openbsd"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/sunos-x64": {
|
||||
"version": "0.21.5",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz",
|
||||
"integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"sunos"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/win32-arm64": {
|
||||
"version": "0.21.5",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz",
|
||||
"integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/win32-ia32": {
|
||||
"version": "0.21.5",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz",
|
||||
"integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==",
|
||||
"cpu": [
|
||||
"ia32"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/win32-x64": {
|
||||
"version": "0.21.5",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz",
|
||||
"integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@rollup/rollup-android-arm-eabi": {
|
||||
"version": "4.19.0",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.19.0.tgz",
|
||||
"integrity": "sha512-JlPfZ/C7yn5S5p0yKk7uhHTTnFlvTgLetl2VxqE518QgyM7C9bSfFTYvB/Q/ftkq0RIPY4ySxTz+/wKJ/dXC0w==",
|
||||
"cpu": [
|
||||
"arm"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"android"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-android-arm64": {
|
||||
"version": "4.19.0",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.19.0.tgz",
|
||||
"integrity": "sha512-RDxUSY8D1tWYfn00DDi5myxKgOk6RvWPxhmWexcICt/MEC6yEMr4HNCu1sXXYLw8iAsg0D44NuU+qNq7zVWCrw==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"android"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-darwin-arm64": {
|
||||
"version": "4.19.0",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.19.0.tgz",
|
||||
"integrity": "sha512-emvKHL4B15x6nlNTBMtIaC9tLPRpeA5jMvRLXVbl/W9Ie7HhkrE7KQjvgS9uxgatL1HmHWDXk5TTS4IaNJxbAA==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-darwin-x64": {
|
||||
"version": "4.19.0",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.19.0.tgz",
|
||||
"integrity": "sha512-fO28cWA1dC57qCd+D0rfLC4VPbh6EOJXrreBmFLWPGI9dpMlER2YwSPZzSGfq11XgcEpPukPTfEVFtw2q2nYJg==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-linux-arm-gnueabihf": {
|
||||
"version": "4.19.0",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.19.0.tgz",
|
||||
"integrity": "sha512-2Rn36Ubxdv32NUcfm0wB1tgKqkQuft00PtM23VqLuCUR4N5jcNWDoV5iBC9jeGdgS38WK66ElncprqgMUOyomw==",
|
||||
"cpu": [
|
||||
"arm"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-linux-arm-musleabihf": {
|
||||
"version": "4.19.0",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.19.0.tgz",
|
||||
"integrity": "sha512-gJuzIVdq/X1ZA2bHeCGCISe0VWqCoNT8BvkQ+BfsixXwTOndhtLUpOg0A1Fcx/+eA6ei6rMBzlOz4JzmiDw7JQ==",
|
||||
"cpu": [
|
||||
"arm"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-linux-arm64-gnu": {
|
||||
"version": "4.19.0",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.19.0.tgz",
|
||||
"integrity": "sha512-0EkX2HYPkSADo9cfeGFoQ7R0/wTKb7q6DdwI4Yn/ULFE1wuRRCHybxpl2goQrx4c/yzK3I8OlgtBu4xvted0ug==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-linux-arm64-musl": {
|
||||
"version": "4.19.0",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.19.0.tgz",
|
||||
"integrity": "sha512-GlIQRj9px52ISomIOEUq/IojLZqzkvRpdP3cLgIE1wUWaiU5Takwlzpz002q0Nxxr1y2ZgxC2obWxjr13lvxNQ==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-linux-powerpc64le-gnu": {
|
||||
"version": "4.19.0",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.19.0.tgz",
|
||||
"integrity": "sha512-N6cFJzssruDLUOKfEKeovCKiHcdwVYOT1Hs6dovDQ61+Y9n3Ek4zXvtghPPelt6U0AH4aDGnDLb83uiJMkWYzQ==",
|
||||
"cpu": [
|
||||
"ppc64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-linux-riscv64-gnu": {
|
||||
"version": "4.19.0",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.19.0.tgz",
|
||||
"integrity": "sha512-2DnD3mkS2uuam/alF+I7M84koGwvn3ZVD7uG+LEWpyzo/bq8+kKnus2EVCkcvh6PlNB8QPNFOz6fWd5N8o1CYg==",
|
||||
"cpu": [
|
||||
"riscv64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-linux-s390x-gnu": {
|
||||
"version": "4.19.0",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.19.0.tgz",
|
||||
"integrity": "sha512-D6pkaF7OpE7lzlTOFCB2m3Ngzu2ykw40Nka9WmKGUOTS3xcIieHe82slQlNq69sVB04ch73thKYIWz/Ian8DUA==",
|
||||
"cpu": [
|
||||
"s390x"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-linux-x64-gnu": {
|
||||
"version": "4.19.0",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.19.0.tgz",
|
||||
"integrity": "sha512-HBndjQLP8OsdJNSxpNIN0einbDmRFg9+UQeZV1eiYupIRuZsDEoeGU43NQsS34Pp166DtwQOnpcbV/zQxM+rWA==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-linux-x64-musl": {
|
||||
"version": "4.19.0",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.19.0.tgz",
|
||||
"integrity": "sha512-HxfbvfCKJe/RMYJJn0a12eiOI9OOtAUF4G6ozrFUK95BNyoJaSiBjIOHjZskTUffUrB84IPKkFG9H9nEvJGW6A==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-win32-arm64-msvc": {
|
||||
"version": "4.19.0",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.19.0.tgz",
|
||||
"integrity": "sha512-HxDMKIhmcguGTiP5TsLNolwBUK3nGGUEoV/BO9ldUBoMLBssvh4J0X8pf11i1fTV7WShWItB1bKAKjX4RQeYmg==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-win32-ia32-msvc": {
|
||||
"version": "4.19.0",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.19.0.tgz",
|
||||
"integrity": "sha512-xItlIAZZaiG/u0wooGzRsx11rokP4qyc/79LkAOdznGRAbOFc+SfEdfUOszG1odsHNgwippUJavag/+W/Etc6Q==",
|
||||
"cpu": [
|
||||
"ia32"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-win32-x64-msvc": {
|
||||
"version": "4.19.0",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.19.0.tgz",
|
||||
"integrity": "sha512-xNo5fV5ycvCCKqiZcpB65VMR11NJB+StnxHz20jdqRAktfdfzhgjTiJ2doTDQE/7dqGaV5I7ZGqKpgph6lCIag==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
]
|
||||
},
|
||||
"node_modules/@types/estree": {
|
||||
"version": "1.0.5",
|
||||
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz",
|
||||
"integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/esbuild": {
|
||||
"version": "0.21.5",
|
||||
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz",
|
||||
"integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==",
|
||||
"dev": true,
|
||||
"hasInstallScript": true,
|
||||
"bin": {
|
||||
"esbuild": "bin/esbuild"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@esbuild/aix-ppc64": "0.21.5",
|
||||
"@esbuild/android-arm": "0.21.5",
|
||||
"@esbuild/android-arm64": "0.21.5",
|
||||
"@esbuild/android-x64": "0.21.5",
|
||||
"@esbuild/darwin-arm64": "0.21.5",
|
||||
"@esbuild/darwin-x64": "0.21.5",
|
||||
"@esbuild/freebsd-arm64": "0.21.5",
|
||||
"@esbuild/freebsd-x64": "0.21.5",
|
||||
"@esbuild/linux-arm": "0.21.5",
|
||||
"@esbuild/linux-arm64": "0.21.5",
|
||||
"@esbuild/linux-ia32": "0.21.5",
|
||||
"@esbuild/linux-loong64": "0.21.5",
|
||||
"@esbuild/linux-mips64el": "0.21.5",
|
||||
"@esbuild/linux-ppc64": "0.21.5",
|
||||
"@esbuild/linux-riscv64": "0.21.5",
|
||||
"@esbuild/linux-s390x": "0.21.5",
|
||||
"@esbuild/linux-x64": "0.21.5",
|
||||
"@esbuild/netbsd-x64": "0.21.5",
|
||||
"@esbuild/openbsd-x64": "0.21.5",
|
||||
"@esbuild/sunos-x64": "0.21.5",
|
||||
"@esbuild/win32-arm64": "0.21.5",
|
||||
"@esbuild/win32-ia32": "0.21.5",
|
||||
"@esbuild/win32-x64": "0.21.5"
|
||||
}
|
||||
},
|
||||
"node_modules/fsevents": {
|
||||
"version": "2.3.3",
|
||||
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
|
||||
"integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
|
||||
"dev": true,
|
||||
"hasInstallScript": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/htmx.org": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/htmx.org/-/htmx.org-2.0.0.tgz",
|
||||
"integrity": "sha512-N0r1VjrqeCpig0mTi2/sooDZBeQlp1RBohnWQ/ufqc7ICaI0yjs04fNGhawm6+/HWhJFlcXn8MqOjWI9QGG2lQ=="
|
||||
},
|
||||
"node_modules/nanoid": {
|
||||
"version": "3.3.7",
|
||||
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz",
|
||||
"integrity": "sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==",
|
||||
"dev": true,
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/ai"
|
||||
}
|
||||
],
|
||||
"bin": {
|
||||
"nanoid": "bin/nanoid.cjs"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
|
||||
}
|
||||
},
|
||||
"node_modules/picocolors": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz",
|
||||
"integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/postcss": {
|
||||
"version": "8.4.39",
|
||||
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.39.tgz",
|
||||
"integrity": "sha512-0vzE+lAiG7hZl1/9I8yzKLx3aR9Xbof3fBHKunvMfOCYAtMhrsnccJY2iTURb9EZd5+pLuiNV9/c/GZJOHsgIw==",
|
||||
"dev": true,
|
||||
"funding": [
|
||||
{
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/postcss/"
|
||||
},
|
||||
{
|
||||
"type": "tidelift",
|
||||
"url": "https://tidelift.com/funding/github/npm/postcss"
|
||||
},
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/ai"
|
||||
}
|
||||
],
|
||||
"dependencies": {
|
||||
"nanoid": "^3.3.7",
|
||||
"picocolors": "^1.0.1",
|
||||
"source-map-js": "^1.2.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^10 || ^12 || >=14"
|
||||
}
|
||||
},
|
||||
"node_modules/rollup": {
|
||||
"version": "4.19.0",
|
||||
"resolved": "https://registry.npmjs.org/rollup/-/rollup-4.19.0.tgz",
|
||||
"integrity": "sha512-5r7EYSQIowHsK4eTZ0Y81qpZuJz+MUuYeqmmYmRMl1nwhdmbiYqt5jwzf6u7wyOzJgYqtCRMtVRKOtHANBz7rA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@types/estree": "1.0.5"
|
||||
},
|
||||
"bin": {
|
||||
"rollup": "dist/bin/rollup"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0",
|
||||
"npm": ">=8.0.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@rollup/rollup-android-arm-eabi": "4.19.0",
|
||||
"@rollup/rollup-android-arm64": "4.19.0",
|
||||
"@rollup/rollup-darwin-arm64": "4.19.0",
|
||||
"@rollup/rollup-darwin-x64": "4.19.0",
|
||||
"@rollup/rollup-linux-arm-gnueabihf": "4.19.0",
|
||||
"@rollup/rollup-linux-arm-musleabihf": "4.19.0",
|
||||
"@rollup/rollup-linux-arm64-gnu": "4.19.0",
|
||||
"@rollup/rollup-linux-arm64-musl": "4.19.0",
|
||||
"@rollup/rollup-linux-powerpc64le-gnu": "4.19.0",
|
||||
"@rollup/rollup-linux-riscv64-gnu": "4.19.0",
|
||||
"@rollup/rollup-linux-s390x-gnu": "4.19.0",
|
||||
"@rollup/rollup-linux-x64-gnu": "4.19.0",
|
||||
"@rollup/rollup-linux-x64-musl": "4.19.0",
|
||||
"@rollup/rollup-win32-arm64-msvc": "4.19.0",
|
||||
"@rollup/rollup-win32-ia32-msvc": "4.19.0",
|
||||
"@rollup/rollup-win32-x64-msvc": "4.19.0",
|
||||
"fsevents": "~2.3.2"
|
||||
}
|
||||
},
|
||||
"node_modules/source-map-js": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz",
|
||||
"integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/three": {
|
||||
"version": "0.166.1",
|
||||
"resolved": "https://registry.npmjs.org/three/-/three-0.166.1.tgz",
|
||||
"integrity": "sha512-LtuafkKHHzm61AQA1be2MAYIw1IjmhOUxhBa0prrLpEMWbV7ijvxCRHjSgHPGp2493wLBzwKV46tA9nivLEgKg=="
|
||||
},
|
||||
"node_modules/vite": {
|
||||
"version": "5.3.4",
|
||||
"resolved": "https://registry.npmjs.org/vite/-/vite-5.3.4.tgz",
|
||||
"integrity": "sha512-Cw+7zL3ZG9/NZBB8C+8QbQZmR54GwqIz+WMI4b3JgdYJvX+ny9AjJXqkGQlDXSXRP9rP0B4tbciRMOVEKulVOA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"esbuild": "^0.21.3",
|
||||
"postcss": "^8.4.39",
|
||||
"rollup": "^4.13.0"
|
||||
},
|
||||
"bin": {
|
||||
"vite": "bin/vite.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.0.0 || >=20.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/vitejs/vite?sponsor=1"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"fsevents": "~2.3.3"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@types/node": "^18.0.0 || >=20.0.0",
|
||||
"less": "*",
|
||||
"lightningcss": "^1.21.0",
|
||||
"sass": "*",
|
||||
"stylus": "*",
|
||||
"sugarss": "*",
|
||||
"terser": "^5.4.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"@types/node": {
|
||||
"optional": true
|
||||
},
|
||||
"less": {
|
||||
"optional": true
|
||||
},
|
||||
"lightningcss": {
|
||||
"optional": true
|
||||
},
|
||||
"sass": {
|
||||
"optional": true
|
||||
},
|
||||
"stylus": {
|
||||
"optional": true
|
||||
},
|
||||
"sugarss": {
|
||||
"optional": true
|
||||
},
|
||||
"terser": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,18 +0,0 @@
|
|||
{
|
||||
"name": "piermesh",
|
||||
"version": "1.0.0",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"description": "",
|
||||
"dependencies": {
|
||||
"htmx.org": "2.0.0",
|
||||
"three": "^0.166.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"vite": "^5.3.4"
|
||||
}
|
||||
}
|
|
@ -1,467 +0,0 @@
|
|||
/*
|
||||
WebSockets Extension
|
||||
============================
|
||||
This extension adds support for WebSockets to htmx. See /www/extensions/ws.md for usage instructions.
|
||||
*/
|
||||
|
||||
(function() {
|
||||
/** @type {import("../htmx").HtmxInternalApi} */
|
||||
var api
|
||||
|
||||
htmx.defineExtension('ws', {
|
||||
|
||||
/**
|
||||
* init is called once, when this extension is first registered.
|
||||
* @param {import("../htmx").HtmxInternalApi} apiRef
|
||||
*/
|
||||
init: function(apiRef) {
|
||||
// Store reference to internal API
|
||||
api = apiRef
|
||||
|
||||
// Default function for creating new EventSource objects
|
||||
if (!htmx.createWebSocket) {
|
||||
htmx.createWebSocket = createWebSocket
|
||||
}
|
||||
|
||||
// Default setting for reconnect delay
|
||||
if (!htmx.config.wsReconnectDelay) {
|
||||
htmx.config.wsReconnectDelay = 'full-jitter'
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* onEvent handles all events passed to this extension.
|
||||
*
|
||||
* @param {string} name
|
||||
* @param {Event} evt
|
||||
*/
|
||||
onEvent: function(name, evt) {
|
||||
var parent = evt.target || evt.detail.elt
|
||||
switch (name) {
|
||||
// Try to close the socket when elements are removed
|
||||
case 'htmx:beforeCleanupElement':
|
||||
|
||||
var internalData = api.getInternalData(parent)
|
||||
|
||||
if (internalData.webSocket) {
|
||||
internalData.webSocket.close()
|
||||
}
|
||||
return
|
||||
|
||||
// Try to create websockets when elements are processed
|
||||
case 'htmx:beforeProcessNode':
|
||||
|
||||
forEach(queryAttributeOnThisOrChildren(parent, 'ws-connect'), function(child) {
|
||||
ensureWebSocket(child)
|
||||
})
|
||||
forEach(queryAttributeOnThisOrChildren(parent, 'ws-send'), function(child) {
|
||||
ensureWebSocketSend(child)
|
||||
})
|
||||
}
|
||||
}
|
||||
})

  function splitOnWhitespace(trigger) {
    return trigger.trim().split(/\s+/)
  }

  function getLegacyWebsocketURL(elt) {
    var legacySSEValue = api.getAttributeValue(elt, 'hx-ws')
    if (legacySSEValue) {
      var values = splitOnWhitespace(legacySSEValue)
      for (var i = 0; i < values.length; i++) {
        var value = values[i].split(/:(.+)/)
        if (value[0] === 'connect') {
          return value[1]
        }
      }
    }
  }

  /**
   * ensureWebSocket creates a new WebSocket on the designated element, using
   * the element's "ws-connect" attribute.
   * @param {HTMLElement} socketElt
   * @returns
   */
  function ensureWebSocket(socketElt) {
    // If the element containing the WebSocket connection no longer exists, then
    // do not connect/reconnect the WebSocket.
    if (!api.bodyContains(socketElt)) {
      return
    }

    // Get the source straight from the element's value
    var wssSource = api.getAttributeValue(socketElt, 'ws-connect')

    if (wssSource == null || wssSource === '') {
      var legacySource = getLegacyWebsocketURL(socketElt)
      if (legacySource == null) {
        return
      } else {
        wssSource = legacySource
      }
    }

    // Guarantee that the wssSource value is a fully qualified URL
    if (wssSource.indexOf('/') === 0) {
      var base_part = location.hostname + (location.port ? ':' + location.port : '')
      if (location.protocol === 'https:') {
        wssSource = 'wss://' + base_part + wssSource
      } else if (location.protocol === 'http:') {
        wssSource = 'ws://' + base_part + wssSource
      }
    }

    var socketWrapper = createWebsocketWrapper(socketElt, function() {
      return htmx.createWebSocket(wssSource)
    })

    socketWrapper.addEventListener('message', function(event) {
      if (maybeCloseWebSocketSource(socketElt)) {
        return
      }

      var response = event.data
      if (!api.triggerEvent(socketElt, 'htmx:wsBeforeMessage', {
        message: response,
        socketWrapper: socketWrapper.publicInterface
      })) {
        return
      }

      api.withExtensions(socketElt, function(extension) {
        response = extension.transformResponse(response, null, socketElt)
      })

      var settleInfo = api.makeSettleInfo(socketElt)
      var fragment = api.makeFragment(response)

      if (fragment.children.length) {
        var children = Array.from(fragment.children)
        for (var i = 0; i < children.length; i++) {
          api.oobSwap(api.getAttributeValue(children[i], 'hx-swap-oob') || 'true', children[i], settleInfo)
        }
      }

      api.settleImmediately(settleInfo.tasks)
      api.triggerEvent(socketElt, 'htmx:wsAfterMessage', { message: response, socketWrapper: socketWrapper.publicInterface })
    })

    // Put the WebSocket into the HTML Element's custom data.
    api.getInternalData(socketElt).webSocket = socketWrapper
  }

  /**
   * @typedef {Object} WebSocketWrapper
   * @property {WebSocket} socket
   * @property {Array<{message: string, sendElt: Element}>} messageQueue
   * @property {number} retryCount
   * @property {(message: string, sendElt: Element) => void} sendImmediately sendImmediately sends message regardless of websocket connection state
   * @property {(message: string, sendElt: Element) => void} send
   * @property {(event: string, handler: Function) => void} addEventListener
   * @property {() => void} handleQueuedMessages
   * @property {() => void} init
   * @property {() => void} close
   */
  /**
   *
   * @param socketElt
   * @param socketFunc
   * @returns {WebSocketWrapper}
   */
  function createWebsocketWrapper(socketElt, socketFunc) {
    var wrapper = {
      socket: null,
      messageQueue: [],
      retryCount: 0,

      /** @type {Object<string, Function[]>} */
      events: {},

      addEventListener: function(event, handler) {
        if (this.socket) {
          this.socket.addEventListener(event, handler)
        }

        if (!this.events[event]) {
          this.events[event] = []
        }

        this.events[event].push(handler)
      },

      sendImmediately: function(message, sendElt) {
        if (!this.socket) {
          api.triggerErrorEvent()
        }
        if (!sendElt || api.triggerEvent(sendElt, 'htmx:wsBeforeSend', {
          message,
          socketWrapper: this.publicInterface
        })) {
          this.socket.send(message)
          sendElt && api.triggerEvent(sendElt, 'htmx:wsAfterSend', {
            message,
            socketWrapper: this.publicInterface
          })
        }
      },

      send: function(message, sendElt) {
        if (this.socket.readyState !== this.socket.OPEN) {
          this.messageQueue.push({ message, sendElt })
        } else {
          this.sendImmediately(message, sendElt)
        }
      },

      handleQueuedMessages: function() {
        while (this.messageQueue.length > 0) {
          var queuedItem = this.messageQueue[0]
          if (this.socket.readyState === this.socket.OPEN) {
            this.sendImmediately(queuedItem.message, queuedItem.sendElt)
            this.messageQueue.shift()
          } else {
            break
          }
        }
      },

      init: function() {
        if (this.socket && this.socket.readyState === this.socket.OPEN) {
          // Close discarded socket
          this.socket.close()
        }

        // Create a new WebSocket and event handlers
        /** @type {WebSocket} */
        var socket = socketFunc()

        // The event.type detail is added for interface conformance with the
        // other two lifecycle events (open and close) so a single handler method
        // can handle them polymorphically, if required.
        api.triggerEvent(socketElt, 'htmx:wsConnecting', { event: { type: 'connecting' } })

        this.socket = socket

        socket.onopen = function(e) {
          wrapper.retryCount = 0
          api.triggerEvent(socketElt, 'htmx:wsOpen', { event: e, socketWrapper: wrapper.publicInterface })
          wrapper.handleQueuedMessages()
        }

        socket.onclose = function(e) {
          // If socket should not be connected, stop further attempts to establish connection
          // If Abnormal Closure/Service Restart/Try Again Later, then set a timer to reconnect after a pause.
          if (!maybeCloseWebSocketSource(socketElt) && [1006, 1012, 1013].indexOf(e.code) >= 0) {
            var delay = getWebSocketReconnectDelay(wrapper.retryCount)
            setTimeout(function() {
              wrapper.retryCount += 1
              wrapper.init()
            }, delay)
          }

          // Notify client code that connection has been closed. Client code can inspect `event` field
          // to determine whether closure has been valid or abnormal
          api.triggerEvent(socketElt, 'htmx:wsClose', { event: e, socketWrapper: wrapper.publicInterface })
        }

        socket.onerror = function(e) {
          api.triggerErrorEvent(socketElt, 'htmx:wsError', { error: e, socketWrapper: wrapper })
          maybeCloseWebSocketSource(socketElt)
        }

        var events = this.events
        Object.keys(events).forEach(function(k) {
          events[k].forEach(function(e) {
            socket.addEventListener(k, e)
          })
        })
      },

      close: function() {
        this.socket.close()
      }
    }

    wrapper.init()

    wrapper.publicInterface = {
      send: wrapper.send.bind(wrapper),
      sendImmediately: wrapper.sendImmediately.bind(wrapper),
      queue: wrapper.messageQueue
    }

    return wrapper
  }
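  /*
    Illustrative sketch: the publicInterface above (send, sendImmediately, queue)
    is handed to client code through the ws* events, so extra frames can be
    pushed on the same socket; the payload below is made up.

      htmx.on('htmx:wsOpen', function(evt) {
        evt.detail.socketWrapper.send(JSON.stringify({ hello: 'node' }))
      })
  */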
  /**
   * ensureWebSocketSend attaches trigger handlers to elements with
   * "ws-send" attribute
   * @param {HTMLElement} elt
   */
  function ensureWebSocketSend(elt) {
    var legacyAttribute = api.getAttributeValue(elt, 'hx-ws')
    if (legacyAttribute && legacyAttribute !== 'send') {
      return
    }

    var webSocketParent = api.getClosestMatch(elt, hasWebSocket)
    processWebSocketSend(webSocketParent, elt)
  }

  /**
   * hasWebSocket function checks if a node has webSocket instance attached
   * @param {HTMLElement} node
   * @returns {boolean}
   */
  function hasWebSocket(node) {
    return api.getInternalData(node).webSocket != null
  }

  /**
   * processWebSocketSend adds event listeners to the <form> element so that
   * messages can be sent to the WebSocket server when the form is submitted.
   * @param {HTMLElement} socketElt
   * @param {HTMLElement} sendElt
   */
  function processWebSocketSend(socketElt, sendElt) {
    var nodeData = api.getInternalData(sendElt)
    var triggerSpecs = api.getTriggerSpecs(sendElt)
    triggerSpecs.forEach(function(ts) {
      api.addTriggerHandler(sendElt, ts, nodeData, function(elt, evt) {
        if (maybeCloseWebSocketSource(socketElt)) {
          return
        }

        /** @type {WebSocketWrapper} */
        var socketWrapper = api.getInternalData(socketElt).webSocket
        var headers = api.getHeaders(sendElt, api.getTarget(sendElt))
        var results = api.getInputValues(sendElt, 'post')
        var errors = results.errors
        var rawParameters = Object.assign({}, results.values)
        var expressionVars = api.getExpressionVars(sendElt)
        var allParameters = api.mergeObjects(rawParameters, expressionVars)
        var filteredParameters = api.filterValues(allParameters, sendElt)

        var sendConfig = {
          parameters: filteredParameters,
          unfilteredParameters: allParameters,
          headers,
          errors,

          triggeringEvent: evt,
          messageBody: undefined,
          socketWrapper: socketWrapper.publicInterface
        }

        if (!api.triggerEvent(elt, 'htmx:wsConfigSend', sendConfig)) {
          return
        }

        if (errors && errors.length > 0) {
          api.triggerEvent(elt, 'htmx:validation:halted', errors)
          return
        }

        var body = sendConfig.messageBody
        if (body === undefined) {
          var toSend = Object.assign({}, sendConfig.parameters)
          if (sendConfig.headers) { toSend.HEADERS = headers }
          body = JSON.stringify(toSend)
        }

        socketWrapper.send(body, elt)

        if (evt && api.shouldCancel(evt, elt)) {
          evt.preventDefault()
        }
      })
    })
  }

  /**
   * getWebSocketReconnectDelay is the default easing function for WebSocket reconnects.
   * @param {number} retryCount // The number of retries that have already taken place
   * @returns {number}
   */
  function getWebSocketReconnectDelay(retryCount) {
    /** @type {"full-jitter" | ((retryCount:number) => number)} */
    var delay = htmx.config.wsReconnectDelay
    if (typeof delay === 'function') {
      return delay(retryCount)
    }
    if (delay === 'full-jitter') {
      var exp = Math.min(retryCount, 6)
      var maxDelay = 1000 * Math.pow(2, exp)
      return maxDelay * Math.random()
    }

    logError('htmx.config.wsReconnectDelay must either be a function or the string "full-jitter"')
  }
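  /*
    Worked example of the default 'full-jitter' delay: with retryCount = 3,
    exp = 3 and maxDelay = 1000 * 2^3 = 8000, so the reconnect waits a random
    0-8000 ms (the exponent caps at 6, i.e. at most 64 s). A custom delay can
    be supplied instead; a sketch of a capped linear backoff:

      htmx.config.wsReconnectDelay = function(retryCount) {
        return Math.min(1000 * (retryCount + 1), 10000)
      }
  */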
  /**
   * maybeCloseWebSocketSource checks whether the element that created the WebSocket
   * still exists in the DOM. If NOT, then the WebSocket is closed and this function
   * returns TRUE. If the element DOES EXIST, then no action is taken, and this function
   * returns FALSE.
   *
   * @param {*} elt
   * @returns
   */
  function maybeCloseWebSocketSource(elt) {
    if (!api.bodyContains(elt)) {
      api.getInternalData(elt).webSocket.close()
      return true
    }
    return false
  }

  /**
   * createWebSocket is the default method for creating new WebSocket objects.
   * It is hoisted into htmx.createWebSocket to be overridden by the user, if needed.
   *
   * @param {string} url
   * @returns WebSocket
   */
  function createWebSocket(url) {
    var sock = new WebSocket(url, [])
    sock.binaryType = htmx.config.wsBinaryType
    return sock
  }
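  /*
    Sketch of a user override, relying on the hoisting into htmx.createWebSocket
    described above (the subprotocol name is hypothetical):

      htmx.createWebSocket = function(url) {
        var sock = new WebSocket(url, ['piermesh'])
        sock.binaryType = 'arraybuffer'
        return sock
      }
  */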
  /**
   * queryAttributeOnThisOrChildren returns all nodes that contain the requested attributeName, INCLUDING THE PROVIDED ROOT ELEMENT.
   *
   * @param {HTMLElement} elt
   * @param {string} attributeName
   */
  function queryAttributeOnThisOrChildren(elt, attributeName) {
    var result = []

    // If the parent element also contains the requested attribute, then add it to the results too.
    if (api.hasAttribute(elt, attributeName) || api.hasAttribute(elt, 'hx-ws')) {
      result.push(elt)
    }

    // Search all child nodes that match the requested attribute
    elt.querySelectorAll('[' + attributeName + '], [data-' + attributeName + '], [data-hx-ws], [hx-ws]').forEach(function(node) {
      result.push(node)
    })

    return result
  }

  /**
   * @template T
   * @param {T[]} arr
   * @param {(T) => void} func
   */
  function forEach(arr, func) {
    if (arr) {
      for (var i = 0; i < arr.length; i++) {
        func(arr[i])
      }
    }
  }
})()
@ -1,130 +0,0 @@
from microdot import Microdot
from microdot import send_file
from microdot.websocket import with_websocket
import random, json, time, msgpack
import logging
from microdot import Request

# Allow webui uploads up to 512 KiB (1024 * 1024 * 0.5 bytes)
Request.max_content_length = 1024 * 1024 * 0.5
Request.max_body_length = 1024 * 1024 * 0.5
Request.max_readline = 1024 * 1024


class Server:
    def __init__(self, transmitter, catch, onodeID, network, cLog):
        self.cLog = cLog
        self.t = transmitter
        self.network = network
        self.network.addLookup(onodeID, self.t.interface.localNode.nodeNum)
        self.nodeID = str(onodeID)
        self.peerIDs = {}
        self.app = Microdot()
        self.catch = catch
        self.nmap = {self.nodeID: self.t.interface.localNode.nodeNum}
        # self.cLog(20, "Initialized server")

        @self.app.route("/res/<path:path>")
        async def static(request, path):
            if ".." in path:
                # directory traversal is not allowed
                return "Not found", 404
            return send_file("webui/build/res/" + path, max_age=86400)

        @self.app.route("/bubble")
        @with_websocket
        async def bubble(request, ws):
            while True:
                r = await ws.receive()
                message = json.loads(r)
                trigger = message["HEADERS"]["HX-Trigger"]
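                # Illustrative example of an incoming frame (values are made up):
                # the htmx ws extension serializes the sending element's inputs
                # plus a HEADERS object, and HX-Trigger carries the id of the
                # element that fired the send, e.g.
                # {"chat_message": "hi", "bID": "000123", "recipientID": "000456",
                #  "recipientNode": "42", "HEADERS": {"HX-Trigger": "bubble"}}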
                # TODO: Drop old id from cache on regen
                if trigger == "gpID":
                    peerID = str(random.randrange(0, 1000000)).zfill(6)
                    await ws.send(
                        """
                        <p id="vpeerID">Peer ID: {0}</p>
                        """.format(
                            peerID
                        )
                    )
                    await ws.send(
                        """
                        <input id="peerID" type="hidden" value="{0}" >
                        """.format(
                            peerID
                        )
                    )
                    await ws.send(
                        """
                        <p id="vnodeID">Node ID: {0}</p>
                        """.format(
                            self.nodeID
                        )
                    )
                    await ws.send(
                        """ <input id="nodeID" type="hidden" value="{0}" >""".format(
                            self.nodeID
                        )
                    )
                    await ws.send(
                        "<input id='gID' type='hidden' value='{0}' hx-swap-oob='true'>".format(
                            peerID
                        )
                    )
                    await ws.send(
                        "<input type='hidden' name='eID' value='{0}' hx-swap-oob='true'>".format(
                            peerID
                        )
                    )
                    peer = {"usedLast": round(time.time() * 1000), "ws": ws}
                    self.peerIDs[peerID] = peer
                elif trigger == "bubble":
                    sender = message["bID"]
                    data = message["chat_message"]
                    # TODO: Setting sender name id
                    # senderName = message["senderID"]
                    senderName = 000000
                    recipient = message["recipientID"]
                    recipientNode = message["recipientNode"]
                    await self.t.addPackets(
                        msgpack.dumps({"data": data}),
                        sender,
                        senderName,
                        recipient,
                        int(recipientNode),
                        directID=self.network.doLookup(recipientNode),
                        packetsClass=2,
                    )
                elif trigger == "catch":
                    res = self.catch.get(message["head"], message["body"])
                    await ws.send('<div id="catchDisplay">{0}</div>'.format(res))
                    # TODO: Catch update packets
                    # DONE: Shared catch/cache objects
                elif trigger == "catchEdit":
                    self.catch.addc(
                        message["eID"],
                        self.nodeID,
                        message["sep"],
                        message["head"],
                        message["body"],
                        {"html": message["catchContent"]},
                    )
                    await ws.send(
                        """
                        <ul id="resultsCatch" hx-swap-oob='true'><li>OK</li></ul>
                        """
                    )
                else:
                    await ws.send(
                        """<div id="chat_room" hx-swap-oob="beforeend">hi</div>"""
                    )

        @self.app.route("/")
        async def index(request):
            return send_file("webui/build/index/index.html")

    async def sendToPeer(self, peerID, data):
        await self.peerIDs[peerID]["ws"].send(
            "<ul id='chat_room' hx-swap-oob='afterend'><li>{0}</li></ul>".format(data)
        )
@ -1,22 +0,0 @@
{% extends "shared/base.html" %}
{% block body %}

<img alt="PierMesh logo" height="128px" src="/res/img/logo.png">
<br>
<br>
{% include "shared/catch.nav.html" %}
<br>
{% include "shared/catch.editor.html" %}
<div hx-history="false">
</div>
<br>
<div class="plank" hx-ext="ws" ws-connect="/bubble">
  <p id="vpeerID">Peer ID:</p>
  <input id="peerID" type="hidden">
  <p id="vnodeID">Node ID:</p>
  <input id="nodeID" type="hidden">
  <button id="gpID" ws-send>Connect</button>
</div>
<br>
{% include "shared/messenger.html" %}
{% endblock %}
@ -1,21 +0,0 @@
<!DOCTYPE html>
<html>

<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <title>{{ title }}</title>
  <link rel="stylesheet" type="text/css" href="/res/css/fonts.css">
  <link rel="stylesheet" type="text/css" href="/res/css/style.css">
  <script src="/res/js/node_modules/htmx.org/dist/htmx.min.js"></script>
  <script src="/res/js/ws.js"></script>
</head>

<body>
  {% include "shared/nav.html" %}
  {% block body %}
  {% endblock %}
</body>

</html>
@ -1,23 +0,0 @@
<div class="plank" hx-ext="ws" ws-connect="/bubble">
  <img src="/res/img/catchdisplay.png">
  <br>
  Catch publisher<br>
  <ul id="resultsCatch">
  </ul>
  <form id="catchEdit" ws-send>
    Head <br> <input type="text" name="head" size="4" maxlength="4"><br>
    Separator <br> <input type="text" name="sep" size="1" maxlength="1"><br>
    Body <br> <input type="text" name="body" size="16" maxlength="16"><br>
    Fins<br>
    <ul id="fins">
      <li class="fin"> <input type="text" size="8" maxlength="8"> </li>
      <li><button>+</button></li>
    </ul>
    Content
    <br>
    <textarea style="min-width: 200px;min-height:200px;" name="catchContent"></textarea>
    <br>
    <button onclick="document.getElementById('eID').value = document.getElementById('peerID').value">Publish</button>
    <input type="hidden" name="eID" id="eID">
  </form>
</div>
@ -1 +0,0 @@
<div style="background-color: var(--palette-three);" id="catchDisplay"></div>
@ -1,31 +0,0 @@
<div class="plank" hx-ext="ws" ws-connect="/bubble">
  <img src="/res/img/catchdisplay.png">
  <br>
  Catch<br><br>
  <form id="catch" ws-send>
    <label for="head">Head (max. 4 characters)</label>
    <br>
    <input type="text" id="head" name="head" size="4" maxlength="4">
    <br>
    <label for="sep">Separator</label>
    <br>
    <input type="text" id="sep" name="sep" size="1" maxlength="1"><br>
    <label for="body">Body (max. 16 characters)</label>
    <br>
    <input type="text" id="body" name="body" size="16" maxlength="16">
    <ul id="fins">
      Fins:
      <li class="fin">
        <input type="text" size="8" maxlength="8">
      </li>
      <li>
        <button>+</button>
      </li>
    </ul>
    <button>Get</button>
  </form>
  Results:
  <br>
  {% include "shared/catch.html" %}
  <br>
</div>
@ -1,20 +0,0 @@
<div class="plank" hx-ext="ws" ws-connect="/bubble">
  <img src="/res/img/bubbledisplay.png">
  <br>
  Bubble
  <br>
  <br>
  Responses: <ul id="chat_room" hx-swap="afterend">
  </ul>
  <br>
  <form id="bubble" ws-send>
    Peer ID:<br>
    <input name="recipientID" id="recipientID" type="number" max="999999"><br>
    Node ID:<br>
    <input name="recipientNode" id="recipientNode" type="number" max="999999"><br>
    Data<br> <textarea style="min-width: 200px;min-height: 200px;" name="chat_message"></textarea>
    <br>
    <input type="hidden" name="bID" id="bID">
    <button onclick="document.getElementById('bID').value = document.getElementById('peerID').value">Send</button>
  </form>
</div>
@ -1 +0,0 @@