Prototype: More cleanup and a preliminary WebRTC test

This commit is contained in:
Agie Ashwood 2024-12-06 16:08:45 -07:00
parent 46997b1f95
commit 795b53bdba
23 changed files with 704 additions and 1205 deletions

View File

@ -1,5 +1,6 @@
from Daisy.Daisy import Daisy from Daisy.Daisy import Daisy
# TODO: More tooling
class Ref(Daisy): class Ref(Daisy):
""" """

12
src/Daisy/Replica.py Normal file
View File

@ -0,0 +1,12 @@
from .Ref import Ref
import datetime
class Replica(Ref):
"""
A Replica is a fallback copy of a distributed asset that is updated by a queued update message
"""
lastUpdate = datetime.datetime.now()
def __init__(self, data: dict, metadata: dict, path: str, remoteNodeID: str):
super().__init__({**metadata, **data}, path, remoteNodeID)
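A minimal standalone sketch (not part of this commit) of the Replica idea described in the docstring: merge metadata with the payload and refresh lastUpdate when a queued update message arrives. The applyUpdate helper is hypothetical.
import datetime

class ReplicaSketch:
    def __init__(self, data: dict, metadata: dict):
        # Merge metadata and payload into one record, metadata first
        self.msg = {**metadata, **data}
        self.lastUpdate = datetime.datetime.now()

    def applyUpdate(self, update: dict):
        # A queued update overwrites matching keys and bumps the timestamp
        self.msg.update(update)
        self.lastUpdate = datetime.datetime.now()

replica = ReplicaSketch({"html": "<h1>v1</h1>"}, {"remoteNodeID": "000001"})
replica.applyUpdate({"html": "<h1>v2</h1>"})
print(replica.msg["html"], replica.lastUpdate)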

View File

@ -28,9 +28,11 @@ copy_tree("res", "build/res")
shutil.copyfile("htmx-extensions/src/ws/ws.js", "build/res/js/ws.js") shutil.copyfile("htmx-extensions/src/ws/ws.js", "build/res/js/ws.js")
tpath = "templates/" tpath = "templates/"
shutil.copyfile("res/js/p2chat.js", "templates/res/p2chat.js")
shutil.copyfile("res/css/style.css", "templates/res/style.css")
for path in os.listdir(tpath): for path in os.listdir(tpath):
if ("base" in path) != True: if (("base" in path) != True) and (("res" in path) != True) :
for t in os.listdir(tpath + path): for t in os.listdir(tpath + path):
if os.path.exists("build/" + path) != True: if os.path.exists("build/" + path) != True:
os.makedirs("build/" + path) os.makedirs("build/" + path)

View File

@ -10,18 +10,20 @@
--palette-four: #61805B; --palette-four: #61805B;
--grid-columns: 8; --grid-columns: 8;
--grid-rows: 8; --grid-rows: 8;
--grid-size: 20px;
} }
/**
#controls { #controls {
display: grid; display: grid;
grid-template-columns: repeat(var(--grid-columns), auto); grid-template-columns: repeat(var(--grid-columns), auto);
gap: 5%; gap: 5%;
} }**/
#render { #render {
display: grid; display: grid;
grid-template-columns: repeat(var(--grid-columns), 20px); grid-template-columns: repeat(var(--grid-columns), var(--grid-size));
grid-template-rows: repeat(var(--grid-rows), 20px); grid-template-rows: repeat(var(--grid-rows), var(--grid-size));
border: 1px solid black; border: 1px solid black;
} }
@ -56,4 +58,10 @@ input[type=text],
input[type=number] { input[type=number] {
min-width: 150px; min-width: 150px;
max-width: 150px; max-width: 150px;
}
.cnum {
min-width: 20px !important;
max-width: 20px !important;
width: 10px !important;
} }

View File

@ -42,123 +42,4 @@ function getCatch() {
getFins(); getFins();
} }
// P2Chat code
function splash(that) {
//alert(parent.id);
//alert(parent.getAttribute("data-coord"));
//alert(that.value);
that.style.backgroundColor = document.querySelector("#color").value;
}
function cGen(that) {
document.getElementById("render").innerHTML = "";
var parent = that.parentElement;
// alert("parent");
var canvasX = Number(document.querySelector("#canvasX").value);
// alert("x");
canvasX = Math.floor(canvasX);
document
.querySelector(":root")
.style.setProperty("--grid-rows", "" + canvasX);
// alert("grid");
var canvasY = Number(document.querySelector("#canvasY").value);
//alert(document.querySelector("#canvasY").value);
canvasY = Math.floor(canvasY);
document
.querySelector(":root")
.style.setProperty("--grid-columns", "" + canvasY);
//alert(canvasY);
var nodeRender = "";
var cloneRender = "";
var nodeControl = "";
var cloneControl = "";
//alert("start loop");
for (let x = 0; x < canvasX; x++) {
for (let y = 0; y < canvasY; y++) {
//alert(" in");
nodeRender = document.getElementById("rendertemplate");
//alert(" past template");
cloneRender = nodeRender.cloneNode(true);
cloneRender.style.display = "grid";
cloneRender.id = "i" + x + "x" + y;
if (y == 0) {
//alert(cloneRender.innerHTML);
}
document.getElementById("render").appendChild(cloneRender);
}
}
}
function setColor(that) {
var color = that.value;
//alert(typeof color);
if (color.includes("#")) {
document.querySelector("#color").value = color;
} else {
document.querySelector("#color").value = "#" + color;
document.querySelector("#picker").value = "#" + color;
}
}
function saveAs(uri, filename) {
var link = document.createElement("a");
if (typeof link.download === "string") {
link.href = uri;
link.download = filename;
//Firefox requires the link to be in the body
document.body.appendChild(link);
//simulate click
link.click();
//remove the link when done
document.body.removeChild(link);
} else {
window.open(uri);
}
}
function save(toFile) {
var canvas = document.createElement("canvas");
var canvasX = Number(document.querySelector("#canvasX").value);
var canvasY = Number(document.querySelector("#canvasY").value);
//alert(canvasX);
//alert(canvasY);
canvas.width = canvasY;
canvas.height = canvasX;
var ctx = canvas.getContext("2d");
var x = 0;
var y = 0;
for (x = 0; x < canvasX; x++) {
for (y = 0; y < canvasY; y++) {
//alert(document.querySelector("#i" + x + "x" + y).style.backgroundColor);
//alert("before fill style");
ctx.fillStyle = document.querySelector(
"#i" + x + "x" + y,
).style.backgroundColor;
//ctx.fillStyle = "#00ff00";
//alert("after fill style");
ctx.fillRect(y, x, 1, 1);
}
}
if (toFile) {
saveAs(canvas.toDataURL("image/png"), " download.png");
} else {
document.getElementById("p2img").value = canvas.toDataURL("image/png");
}
}
function p2ToBubble() {
save();
var bub = new Object();
bub.img = document.getElementById("p2img").value;
document.getElementById("chat_message").value += JSON.stringify(bub);
}
document.addEventListener("DOMContentLoaded", function(event) {
setColor(document.getElementById("picker"));
cGen(document.getElementById("canvasY"));
});

159
src/Splash/res/js/p2chat.js Normal file
View File

@ -0,0 +1,159 @@
// P2Chat code
function splash(that) {
//alert(parent.id);
//alert(parent.getAttribute("data-coord"));
//alert(that.value);
var erase = document.getElementById("erase").checked;
console.log(erase);
if (erase){
that.style.backgroundColor = document.getElementById("rendertemplate").style.backgroundColor;
} else {
that.style.backgroundColor = document.querySelector("#color").value;
}
save(false, true);
}
function cGen(that) {
document.getElementById("render").innerHTML = "";
var parent = that.parentElement;
// alert("parent");
var canvasX = Number(document.querySelector("#canvasX").value);
// alert("x");
canvasX = Math.floor(canvasX);
document
.querySelector(":root")
.style.setProperty("--grid-rows", "" + canvasX);
// alert("grid");
var canvasY = Number(document.querySelector("#canvasY").value);
//alert(document.querySelector("#canvasY").value);
canvasY = Math.floor(canvasY);
document
.querySelector(":root")
.style.setProperty("--grid-columns", "" + canvasY);
//alert(canvasY);
var nodeRender = "";
var cloneRender = "";
var nodeControl = "";
var cloneControl = "";
//alert("start loop");
for (let x = 0; x < canvasX; x++) {
for (let y = 0; y < canvasY; y++) {
//alert(" in");
nodeRender = document.getElementById("rendertemplate");
//alert(" past template");
cloneRender = nodeRender.cloneNode(true);
cloneRender.style.display = "grid";
cloneRender.id = "i" + x + "x" + y;
if (y == 0) {
//alert(cloneRender.innerHTML);
}
document.getElementById("render").appendChild(cloneRender);
}
}
}
function setColor(that) {
var color = that.value;
//alert(typeof color);
if (color.includes("#")) {
document.querySelector("#color").value = color;
} else {
document.querySelector("#color").value = "#" + color;
document.querySelector("#picker").value = "#" + color;
}
}
function saveAs(uri, filename) {
var link = document.createElement("a");
if (typeof link.download === "string") {
link.href = uri;
link.download = filename;
//Firefox requires the link to be in the body
document.body.appendChild(link);
//simulate click
link.click();
//remove the link when done
document.body.removeChild(link);
} else {
window.open(uri);
}
}
var lastActUndo = false;
var frameCache = [];
function undo(){
if (frameCache.length > 0){
var curCache = frameCache.pop();
console.log(lastActUndo);
if (lastActUndo != true){
curCache = frameCache.pop();
}
for (x = 0; x < curCache.length; x++) {
var curCacheRow = [];
for (y = 0; y < curCache[x].length; y++) {
document.querySelector(
"#i" + x + "x" + y,
).style.backgroundColor = curCache[x][y];
}
}
}
lastActUndo = true;
}
function save(toFile, toFrameCache) {
var canvas = document.createElement("canvas");
var canvasX = Number(document.querySelector("#canvasX").value);
var canvasY = Number(document.querySelector("#canvasY").value);
//alert(canvasX);
//alert(canvasY);
canvas.width = canvasY;
canvas.height = canvasX;
var ctx = canvas.getContext("2d");
var x = 0;
var y = 0;
var curCache = [];
for (x = 0; x < canvasX; x++) {
var curCacheRow = [];
for (y = 0; y < canvasY; y++) {
var curColor = document.querySelector(
"#i" + x + "x" + y,
).style.backgroundColor;
ctx.fillStyle = curColor;
//ctx.fillStyle = "#00ff00";
//alert("after fill style");
if (toFrameCache){
curCacheRow.push(curColor);
} else {
ctx.fillRect(y, x, 1, 1);
}
}
curCache.push(curCacheRow);
}
if (toFile) {
saveAs(canvas.toDataURL("image/png"), " download.png");
} else if (toFrameCache){
lastActUndo = false;
frameCache.push(curCache);
} else {
document.getElementById("p2img").value = canvas.toDataURL("image/png");
}
}
function p2ToBubble() {
save(false, false);
var bub = new Object();
bub.img = document.getElementById("p2img").value;
document.getElementById("chat_message").value += JSON.stringify(bub);
}
document.addEventListener("DOMContentLoaded", function(event) {
setColor(document.getElementById("picker"));
cGen(document.getElementById("canvasY"));
});
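The undo function above relies on a frame cache: every paint pushes a snapshot, and undo pops twice (discarding the current state before restoring the prior one) unless the last action was itself an undo. A standalone sketch of that rule, not tied to the DOM:
# Standalone sketch (not part of this commit) of the frame-cache undo rule.
frame_cache = []
last_act_undo = False

def snapshot(frame):
    global last_act_undo
    frame_cache.append(list(frame))
    last_act_undo = False

def undo():
    global last_act_undo
    if not frame_cache:
        return None
    frame = frame_cache.pop()
    if not last_act_undo and frame_cache:
        # Drop the current state, restore the one before it
        frame = frame_cache.pop()
    last_act_undo = True
    return frame

snapshot(["#000000"])
snapshot(["#ffffff"])
print(undo())  # ['#000000']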

View File

@ -10,6 +10,7 @@
"license": "ISC", "license": "ISC",
"dependencies": { "dependencies": {
"htmx.org": "2.0.0", "htmx.org": "2.0.0",
"simple-peer": "^9.11.1",
"three": "^0.166.1" "three": "^0.166.1"
}, },
"devDependencies": { "devDependencies": {
@ -598,6 +599,69 @@
"integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==", "integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==",
"dev": true "dev": true
}, },
"node_modules/base64-js": {
"version": "1.5.1",
"resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz",
"integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
]
},
"node_modules/buffer": {
"version": "6.0.3",
"resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz",
"integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
],
"dependencies": {
"base64-js": "^1.3.1",
"ieee754": "^1.2.1"
}
},
"node_modules/debug": {
"version": "4.3.7",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz",
"integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==",
"dependencies": {
"ms": "^2.1.3"
},
"engines": {
"node": ">=6.0"
},
"peerDependenciesMeta": {
"supports-color": {
"optional": true
}
}
},
"node_modules/err-code": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/err-code/-/err-code-3.0.1.tgz",
"integrity": "sha512-GiaH0KJUewYok+eeY05IIgjtAe4Yltygk9Wqp1V5yVWLdhf0hYZchRjNIT9bb0mSwRcIusT3cx7PJUf3zEIfUA=="
},
"node_modules/esbuild": { "node_modules/esbuild": {
"version": "0.21.5", "version": "0.21.5",
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz", "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz",
@ -650,11 +714,45 @@
"node": "^8.16.0 || ^10.6.0 || >=11.0.0" "node": "^8.16.0 || ^10.6.0 || >=11.0.0"
} }
}, },
"node_modules/get-browser-rtc": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/get-browser-rtc/-/get-browser-rtc-1.1.0.tgz",
"integrity": "sha512-MghbMJ61EJrRsDe7w1Bvqt3ZsBuqhce5nrn/XAwgwOXhcsz53/ltdxOse1h/8eKXj5slzxdsz56g5rzOFSGwfQ=="
},
"node_modules/htmx.org": { "node_modules/htmx.org": {
"version": "2.0.0", "version": "2.0.0",
"resolved": "https://registry.npmjs.org/htmx.org/-/htmx.org-2.0.0.tgz", "resolved": "https://registry.npmjs.org/htmx.org/-/htmx.org-2.0.0.tgz",
"integrity": "sha512-N0r1VjrqeCpig0mTi2/sooDZBeQlp1RBohnWQ/ufqc7ICaI0yjs04fNGhawm6+/HWhJFlcXn8MqOjWI9QGG2lQ==" "integrity": "sha512-N0r1VjrqeCpig0mTi2/sooDZBeQlp1RBohnWQ/ufqc7ICaI0yjs04fNGhawm6+/HWhJFlcXn8MqOjWI9QGG2lQ=="
}, },
"node_modules/ieee754": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz",
"integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
]
},
"node_modules/inherits": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
},
"node_modules/ms": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="
},
"node_modules/nanoid": { "node_modules/nanoid": {
"version": "3.3.7", "version": "3.3.7",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz", "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz",
@ -707,6 +805,46 @@
"node": "^10 || ^12 || >=14" "node": "^10 || ^12 || >=14"
} }
}, },
"node_modules/queue-microtask": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
"integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
]
},
"node_modules/randombytes": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz",
"integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==",
"dependencies": {
"safe-buffer": "^5.1.0"
}
},
"node_modules/readable-stream": {
"version": "3.6.2",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
"integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==",
"dependencies": {
"inherits": "^2.0.3",
"string_decoder": "^1.1.1",
"util-deprecate": "^1.0.1"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/rollup": { "node_modules/rollup": {
"version": "4.19.0", "version": "4.19.0",
"resolved": "https://registry.npmjs.org/rollup/-/rollup-4.19.0.tgz", "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.19.0.tgz",
@ -742,6 +880,53 @@
"fsevents": "~2.3.2" "fsevents": "~2.3.2"
} }
}, },
"node_modules/safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
"integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
]
},
"node_modules/simple-peer": {
"version": "9.11.1",
"resolved": "https://registry.npmjs.org/simple-peer/-/simple-peer-9.11.1.tgz",
"integrity": "sha512-D1SaWpOW8afq1CZGWB8xTfrT3FekjQmPValrqncJMX7QFl8YwhrPTZvMCANLtgBwwdS+7zURyqxDDEmY558tTw==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
],
"dependencies": {
"buffer": "^6.0.3",
"debug": "^4.3.2",
"err-code": "^3.0.1",
"get-browser-rtc": "^1.1.0",
"queue-microtask": "^1.2.3",
"randombytes": "^2.1.0",
"readable-stream": "^3.6.0"
}
},
"node_modules/source-map-js": { "node_modules/source-map-js": {
"version": "1.2.0", "version": "1.2.0",
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz", "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz",
@ -751,11 +936,24 @@
"node": ">=0.10.0" "node": ">=0.10.0"
} }
}, },
"node_modules/string_decoder": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
"integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
"dependencies": {
"safe-buffer": "~5.2.0"
}
},
"node_modules/three": { "node_modules/three": {
"version": "0.166.1", "version": "0.166.1",
"resolved": "https://registry.npmjs.org/three/-/three-0.166.1.tgz", "resolved": "https://registry.npmjs.org/three/-/three-0.166.1.tgz",
"integrity": "sha512-LtuafkKHHzm61AQA1be2MAYIw1IjmhOUxhBa0prrLpEMWbV7ijvxCRHjSgHPGp2493wLBzwKV46tA9nivLEgKg==" "integrity": "sha512-LtuafkKHHzm61AQA1be2MAYIw1IjmhOUxhBa0prrLpEMWbV7ijvxCRHjSgHPGp2493wLBzwKV46tA9nivLEgKg=="
}, },
"node_modules/util-deprecate": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="
},
"node_modules/vite": { "node_modules/vite": {
"version": "5.3.4", "version": "5.3.4",
"resolved": "https://registry.npmjs.org/vite/-/vite-5.3.4.tgz", "resolved": "https://registry.npmjs.org/vite/-/vite-5.3.4.tgz",

View File

@ -10,6 +10,7 @@
"description": "", "description": "",
"dependencies": { "dependencies": {
"htmx.org": "2.0.0", "htmx.org": "2.0.0",
"simple-peer": "^9.11.1",
"three": "^0.166.1" "three": "^0.166.1"
}, },
"devDependencies": { "devDependencies": {

View File

@ -22,4 +22,6 @@
{% include "shared/catch.editor.html" %} {% include "shared/catch.editor.html" %}
<br> <br>
{% include "shared/messenger.html" %} {% include "shared/messenger.html" %}
<br>
{% include "shared/webrtc.html" %}
{% endblock %} {% endblock %}

View File

@ -0,0 +1,11 @@
{% set standAlone = True %}
{% extends "shared/base.html" %}
{% block body %}
<br>
<div class="plank">
{% include "shared/p2chat.html" %}
<br>
Built for the PierMesh project
<a href="https://piermesh.net">https://piermesh.net</a>
</div>
{% endblock %}
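With standAlone set, base.html inlines res/p2chat.js and res/style.css instead of linking them, so this page can be rendered to a single self-contained file. A minimal sketch of rendering it with Jinja2; the template filename and output path here are assumptions, not taken from the commit.
from jinja2 import Environment, FileSystemLoader

# Assumes templates/ is the loader root (the build copies p2chat.js and
# style.css into templates/res/) and a hypothetical template path.
env = Environment(loader=FileSystemLoader("templates"))
html = env.get_template("shared/p2chat.standalone.html").render(title="P2Chat")
with open("build/p2chat.html", "w") as f:
    f.write(html)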

View File

@ -0,0 +1,159 @@
// P2Chat code
function splash(that) {
//alert(parent.id);
//alert(parent.getAttribute("data-coord"));
//alert(that.value);
var erase = document.getElementById("erase").checked;
console.log(erase);
if (erase){
that.style.backgroundColor = document.getElementById("rendertemplate").style.backgroundColor;
} else {
that.style.backgroundColor = document.querySelector("#color").value;
}
save(false, true);
}
function cGen(that) {
document.getElementById("render").innerHTML = "";
var parent = that.parentElement;
// alert("parent");
var canvasX = Number(document.querySelector("#canvasX").value);
// alert("x");
canvasX = Math.floor(canvasX);
document
.querySelector(":root")
.style.setProperty("--grid-rows", "" + canvasX);
// alert("grid");
var canvasY = Number(document.querySelector("#canvasY").value);
//alert(document.querySelector("#canvasY").value);
canvasY = Math.floor(canvasY);
document
.querySelector(":root")
.style.setProperty("--grid-columns", "" + canvasY);
//alert(canvasY);
var nodeRender = "";
var cloneRender = "";
var nodeControl = "";
var cloneControl = "";
//alert("start loop");
for (let x = 0; x < canvasX; x++) {
for (let y = 0; y < canvasY; y++) {
//alert(" in");
nodeRender = document.getElementById("rendertemplate");
//alert(" past template");
cloneRender = nodeRender.cloneNode(true);
cloneRender.style.display = "grid";
cloneRender.id = "i" + x + "x" + y;
if (y == 0) {
//alert(cloneRender.innerHTML);
}
document.getElementById("render").appendChild(cloneRender);
}
}
}
function setColor(that) {
var color = that.value;
//alert(typeof color);
if (color.includes("#")) {
document.querySelector("#color").value = color;
} else {
document.querySelector("#color").value = "#" + color;
document.querySelector("#picker").value = "#" + color;
}
}
function saveAs(uri, filename) {
var link = document.createElement("a");
if (typeof link.download === "string") {
link.href = uri;
link.download = filename;
//Firefox requires the link to be in the body
document.body.appendChild(link);
//simulate click
link.click();
//remove the link when done
document.body.removeChild(link);
} else {
window.open(uri);
}
}
var lastActUndo = false;
var frameCache = [];
function undo(){
if (frameCache.length > 0){
var curCache = frameCache.pop();
console.log(lastActUndo);
if (lastActUndo != true){
curCache = frameCache.pop();
}
for (x = 0; x < curCache.length; x++) {
var curCacheRow = [];
for (y = 0; y < curCache[x].length; y++) {
document.querySelector(
"#i" + x + "x" + y,
).style.backgroundColor = curCache[x][y];
}
}
}
lastActUndo = true;
}
function save(toFile, toFrameCache) {
var canvas = document.createElement("canvas");
var canvasX = Number(document.querySelector("#canvasX").value);
var canvasY = Number(document.querySelector("#canvasY").value);
//alert(canvasX);
//alert(canvasY);
canvas.width = canvasY;
canvas.height = canvasX;
var ctx = canvas.getContext("2d");
var x = 0;
var y = 0;
var curCache = [];
for (x = 0; x < canvasX; x++) {
var curCacheRow = [];
for (y = 0; y < canvasY; y++) {
var curColor = document.querySelector(
"#i" + x + "x" + y,
).style.backgroundColor;
ctx.fillStyle = curColor;
//ctx.fillStyle = "#00ff00";
//alert("after fill style");
if (toFrameCache){
curCacheRow.push(curColor);
} else {
ctx.fillRect(y, x, 1, 1);
}
}
curCache.push(curCacheRow);
}
if (toFile) {
saveAs(canvas.toDataURL("image/png"), " download.png");
} else if (toFrameCache){
lastActUndo = false;
frameCache.push(curCache);
} else {
document.getElementById("p2img").value = canvas.toDataURL("image/png");
}
}
function p2ToBubble() {
save(false, false);
var bub = new Object();
bub.img = document.getElementById("p2img").value;
document.getElementById("chat_message").value += JSON.stringify(bub);
}
document.addEventListener("DOMContentLoaded", function(event) {
setColor(document.getElementById("picker"));
cGen(document.getElementById("canvasY"));
});

View File

@ -0,0 +1,67 @@
:root {
--palette-text-white: #FFFFFF;
--palette-text-black: #000000;
--palette-text-three: #3A424D;
--palette-text-four: #5B8080;
--palette-one: #3A4D24;
--palette-two: #A6B08E;
--palette-three: #879B77;
--palette-four: #61805B;
--grid-columns: 8;
--grid-rows: 8;
--grid-size: 20px;
}
/**
#controls {
display: grid;
grid-template-columns: repeat(var(--grid-columns), auto);
gap: 5%;
}**/
#render {
display: grid;
grid-template-columns: repeat(var(--grid-columns), var(--grid-size));
grid-template-rows: repeat(var(--grid-rows), var(--grid-size));
border: 1px solid black;
}
html {
background-color: var(--palette-one);
color: var(--palette-text-white);
font-family: 'Ubuntu Nerd Font';
padding: 10px;
}
.plank {
padding: 10px;
background-color: var(--palette-two);
}
.plankInner {
display: none;
}
ul {
padding: 0;
list-style-type: none !important;
}
li {
padding-top: 5px;
text-decoration: none;
list-style-type: none;
}
input[type=text],
input[type=number] {
min-width: 150px;
max-width: 150px;
}
.cnum {
min-width: 20px !important;
max-width: 20px !important;
width: 10px !important;
}

View File

@ -5,13 +5,27 @@
<meta charset="UTF-8"> <meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>{{ title }}</title> <title>{{ title }}</title>
{% if standAlone %}
<script>{% include "res/p2chat.js" %}</script>
<style>
{% include "res/style.css" %}
</style>
{% else %}
<link rel="stylesheet" type="text/css" href="/res/css/fonts.css"> <link rel="stylesheet" type="text/css" href="/res/css/fonts.css">
<link rel="stylesheet" type="text/css" href="/res/css/style.css"> <link rel="stylesheet" type="text/css" href="/res/css/style.css">
<script src="/res/js/node_modules/simple-peer/simplepeer.min.js">
</script>
<script src="/res/js/node_modules/htmx.org/dist/htmx.min.js"></script> <script src="/res/js/node_modules/htmx.org/dist/htmx.min.js"></script>
<script src="/res/js/ws.js"> <script src="/res/js/ws.js">
</script> </script>
<script src="/res/js/custom.js"> <script src="/res/js/custom.js">
</script> </script>
<script src="/res/js/p2chat.js">
</script>
{% endif %}
</head> </head>
<body> <body>

View File

@ -5,6 +5,7 @@
Responses: <ul id="chat_room" hx-swap="afterend"> Responses: <ul id="chat_room" hx-swap="afterend">
</ul> </ul>
<br> <br>
{% set standAlone = false %}
{% include "shared/p2chat.html" %} {% include "shared/p2chat.html" %}
Peer ID:<br> Peer ID:<br>
<input name="recipientID" id="recipientID" type="number" max="999999"><br> <input name="recipientID" id="recipientID" type="number" max="999999"><br>

View File

@ -1,20 +1,30 @@
<div id="p2chat"> <div id="p2chat">
<input type="hidden" id="p2img"> <input type="hidden" id="p2img">
<input type="hidden" id="color" value="#000000"> <input type="hidden" id="color" value="#000000">
Background color: Background color (pick a color with the color picker and then click the square to set it):
<div id="rendertemplate" style="max-width: 20px;min-height:20px;background-color: #000000;" <div id="rendertemplate" style="max-width: 20px;min-height:20px;background-color: #000000;"
onclick="try{splash(this);}catch(e){alert(e);}"></div> onclick="try{splash(this);}catch(e){alert(e);}"></div>
<br>
<div id="controls"> <div id="controls">
Color picker: <input type="color" onchange="try{setColor(this);}catch(e){alert(e);}" value="#ffffff" id="picker"> Color picker:<br> <input type="color" onchange="try{setColor(this);}catch(e){alert(e);}" value="#ffffff"
Hex input: <input type="text" maxlength="6" onchange="try{setColor(this);}catch(e){alert(e);}" value="000000" /> id="picker">
<br>
Hex input: <input type="text" maxlength="6" oninput="try{setColor(this);}catch(e){alert(e);}" value="000000" />
</div>
<div>
Erase? <input type="checkbox" id="erase"><br>
<button onclick="undo();">Undo</button>
</div> </div>
<br> <br>
<div id="create"> <div id="create">
X<input type="number" min="8" max="64" placeholder="8" id="canvasX" value="8" /> X: <input class="cnum" type="number" min="8" max="64" placeholder="8" id="canvasX" value="8" /><br>
Y<input type="number" min="8" max="64" placeholder="8" id="canvasY" value="8" /> Y: <input class="cnum" type="number" min="8" max="64" placeholder="8" id="canvasY" value="8" /><br>
<button onclick="try{cGen(this);}catch(e){alert(e);}">Generate workspace</button> <button onclick="try{cGen(this);}catch(e){alert(e);}">Generate workspace</button>
<br>
{% if standAlone %}
<button onclick="try{save(true, false);}catch(e){alert(e);}">Save</button>
{% else %}
<button onclick="try{p2ToBubble();}catch(e){alert(e);}">Add to message</button> <button onclick="try{p2ToBubble();}catch(e){alert(e);}">Add to message</button>
{% endif %}
</div> </div>
<br> <br>
<div id="render"> <div id="render">

View File

@ -0,0 +1,43 @@
<div class="plank">
<style>
#outgoing {
width: 600px;
word-wrap: break-word;
white-space: normal;
}
</style>
<form id="webrtc">
<textarea id="incoming"></textarea>
<button type="submit">submit</button>
</form>
<pre id="outgoing"></pre>
<script>
document.addEventListener("DOMContentLoaded", function (event) {
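// Manual signaling test (no signaling server yet): open this page in two
// tabs and add #1 to the URL of exactly one tab so it becomes the initiator.
// Copy the JSON shown in #outgoing of one tab into the #incoming box of the
// other and submit, then do the same in the opposite direction; once the
// offer and answer have both been exchanged the peers connect and send a
// test message.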
const p = new SimplePeer({
initiator: location.hash === '#1',
trickle: false
})
p.on('error', err => console.log('error', err))
p.on('signal', data => {
console.log('SIGNAL', JSON.stringify(data))
document.querySelector('#outgoing').textContent = JSON.stringify(data)
})
document.querySelector('#webrtc').addEventListener('submit', ev => {
ev.preventDefault()
p.signal(JSON.parse(document.querySelector('#incoming').value))
})
p.on('connect', () => {
console.log('CONNECT')
p.send('whatever' + Math.random())
})
p.on('data', data => {
console.log('data: ' + data)
})
});
</script>
</div>

6
src/TODO.md Normal file
View File

@ -0,0 +1,6 @@
Add a new tab with Catch contents like in Hopper
Import html into catch editor
Import catch as template
Export as one index.html file with inlined assets or zip without inlining
Automatic Replicas of Catchs
DCDN

View File

@ -1,296 +0,0 @@
import base64
import os
from cryptography.fernet import Fernet
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import dh
from cryptography.hazmat.primitives.kdf.hkdf import HKDF
from cryptography.hazmat.primitives.serialization import (
Encoding,
NoEncryption,
ParameterFormat,
PublicFormat,
PrivateFormat,
)
import cryptography.hazmat.primitives.serialization as Serialization
import msgpack
from Daisy.Store import Store
# TODO: Different store directories per node
class DHEFern:
"""
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Cryptography/WhaleSong.py>`__
Attributes
----------
cLog
Method reference to `run.Node.cLog` so we can log to the ui from here
loadedParams: dict
In memory representations of cryptography parameters
loadedKeys: dict
In memory representations of cryptography keys
nodeNickname: str
Name of node for isolating configs when running multiple nodes
cache: Components.daisy.Cache
Daisy cache for use in storing cryptography information
publicKey
Public key for node
privateKey
Private key for node
"""
def __init__(self, cache, nodeNickname, cLog):
"""
Parameters
----------
cache: Components.daisy.Cache
Reference to the node instances Daisy cache
nodeNickname: str
Node nickname for record storage
cLog
Reference to `run.Node.cLog`
"""
self.cLog = cLog
self.stores = {}
self.loadedParams = {}
self.loadedKeys = {}
self.nodeNickname = nodeNickname
self.cache = cache
if os.path.exists("daisy/cryptography/{0}/param".format(nodeNickname)) == False:
self.initStore("param")
else:
self.stores["param"] = Store("param", "cryptography", nodeNickname)
self.params = self.loadParamBytes(self.stores["param"].get()["self"])
self.cLog(20, "Param store initialized")
if os.path.exists("daisy/cryptography/{0}/key".format(nodeNickname)) == False:
self.cLog(20, "Key store DNE, initializing")
self.initStore("key")
self.genKeyPair()
else:
self.cLog(20, "Key store exists, loading")
self.stores["key"] = Store("key", "cryptography", nodeNickname)
self.cLog(20, "Store loaded")
# tks = self.stores["key"].get()
# self.publicKey = tks["self"]["publicKey"]
# self.privateKey = tks["self"]["privateKey"]
self.cLog(20, "Key store initialized")
def checkInMem(self, store: str, nodeID: str):
"""
Check if parameters or keys are loaded for node of nodeID
Parameters
----------
store: str
Whether to check loaded keys or parameters
"""
if store == "param":
return nodeID in self.loadedParams.keys()
elif store == "key":
return nodeID in self.loadedKeys.keys()
def loadRecordToMem(self, store: str, nodeID: str):
"""
Load record of nodeID from store to either keys or parameters
"""
r = self.getRecord(store, nodeID)
if r == False:
self.cLog(
30, "Tried to load nonexistent {0} for node {1}".format(store, nodeID)
)
return False
elif self.checkInMem(store, nodeID):
self.cLog(10, "{0}s already deserialized, skipping".format(store))
else:
if store == "param":
self.loadedParams[nodeID] = self.loadParamBytes(r)
elif store == "key":
self.loadedKeys[nodeID] = {
"publicKey": Serialization.load_pem_public_key(r["publicKey"]),
"privateKey": Serialization.load_pem_private_key(
r["privateKey"], None
),
}
return True
def getRecord(self, store: str, key: str):
"""
Get record from store: store with key: key
"""
r = self.stores[store].getRecord(key)
if r == False:
self.cLog(20, "Record does not exist")
return False
else:
return r
def initStore(self, store: str):
"""
Initialize store: store
"""
self.stores[store] = Store(store, "cryptography", self.nodeNickname)
if store == "param":
self.genParams()
self.stores[store].update("self", self.getParamsBytes(), recur=False)
elif store == "key":
self.stores[store].update("self", {}, recur=False)
else:
self.cLog(30, "Store not defined")
def genParams(self):
"""
Generate Diffie Hellman parameters
"""
params = dh.generate_parameters(generator=2, key_size=2048)
self.params = params
return params
def getParamsBytes(self):
"""
Get bytes encoded from self.parameters (TODO: Encode from store)
"""
return self.params.parameter_bytes(Encoding.PEM, ParameterFormat.PKCS3)
def loadParamBytes(self, pemBytes: bytes):
"""
Load parameters to self.params from given bytes (TODO: Load from store)
"""
self.params = Serialization.load_pem_parameters(pemBytes)
return self.params
def genKeyPair(self, paramsOverride=False, setSelf: bool = True):
"""
Generate public and private keys from self.params (TODO: Gen from passed params)
paramsOverride
False or parameters to use (TODO)
setSelf: bool
Whether to set self.privateKey and self.publicKey
"""
privateKey = self.params.generate_private_key()
if setSelf:
self.privateKey = privateKey
publicKey = privateKey.public_key()
if setSelf:
self.publicKey = publicKey
self.stores["key"].update(
"self",
{
"publicKey": self.publicKey.public_bytes(
Encoding.PEM, PublicFormat.SubjectPublicKeyInfo
),
"privateKey": self.privateKey.private_bytes(
Encoding.PEM, PrivateFormat.PKCS8, NoEncryption()
),
},
)
return [privateKey, publicKey]
else:
publicKey = publicKey.public_bytes(
Encoding.PEM, PublicFormat.SubjectPublicKeyInfo
)
privateKey = privateKey.private_bytes(
Encoding.PEM, PrivateFormat.PKCS8, NoEncryption()
)
return [privateKey, publicKey]
def keyDerive(self, pubKey: bytes, salt: bytes, nodeID: str, params: bytes):
"""
Derive shared key using Diffie Hellman
pubKey: bytes
Public key
nodeID: str
PierMesh node ID
params: bytes
Encryption parameters
"""
if self.checkInMem("param", nodeID) == False:
if self.getRecord("param", nodeID) == False:
self.updateStore("param", nodeID, params, recur=False)
self.loadRecordToMem("param", nodeID)
self.cLog(20, "Precheck done for key derivation")
# TODO: Load them and if private key exists load it, otherwise generate a private key
if self.checkInMem("key", nodeID) == False:
if self.getRecord("key", nodeID) == False:
privateKey, publicKey = self.genKeyPair(setSelf=False)
self.updateStore(
"key", nodeID, {"publicKey": publicKey, "privateKey": privateKey}
)
self.loadRecordToMem("key", nodeID)
sharedKey = self.loadedKeys[nodeID]["privateKey"].exchange(
Serialization.load_pem_public_key(pubKey)
)
# Perform key derivation.
self.cLog(20, "Performing key derivation")
derivedKey = HKDF(
algorithm=hashes.SHA256(), length=32, salt=salt, info=b"handshake data"
).derive(sharedKey)
self.cLog(20, "Derived key")
ederivedKey = base64.urlsafe_b64encode(derivedKey)
tr = self.getRecord("key", nodeID)
tr["derivedKey"] = ederivedKey
self.updateStore("key", nodeID, tr)
self.cLog(20, "Done with cryptography store updates")
return ederivedKey
def getSalt(self):
"""
Get random salt
"""
return os.urandom(16)
# TODO: Build in transport security (node/node)
def encrypt(self, data, nodeID: str, isDict: bool = True):
"""
Do Fernet encryption
data
Either bytes or dict to encrypt
isDict: bool
Whether data is a dictionary
"""
r = self.getRecord("key", nodeID)
if r == False:
self.cLog(20, "Node {0} not in keystore".format(nodeID))
return False
else:
derivedKey = r["derivedKey"]
fernet = Fernet(derivedKey)
if isDict:
data = msgpack.dumps(data)
token = fernet.encrypt(data)
return token
def decrypt(self, data, nodeID: str):
"""
Decrypt bytes and return either str or dict (TODO: Check whether to msgpack load)
"""
r = self.getRecord("key", nodeID)
if r == False:
self.cLog(20, "No record of node " + nodeID)
return False
elif not "derivedKey" in r.keys():
self.cLog(20, "No key derived for node " + nodeID)
return False
else:
fernet = Fernet(self.getRecord("key", nodeID)["derivedKey"])
return msgpack.loads(fernet.decrypt(data))
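The DHEFern class removed above derived a shared Fernet key via a Diffie-Hellman exchange followed by HKDF. A standalone sketch of that derivation with the cryptography library, independent of the Daisy stores:
# Standalone sketch (not part of this commit): DH exchange, HKDF to a
# 32-byte key, base64-encoded for Fernet.
import base64
import os
from cryptography.fernet import Fernet
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import dh
from cryptography.hazmat.primitives.kdf.hkdf import HKDF

params = dh.generate_parameters(generator=2, key_size=2048)
alice = params.generate_private_key()
bob = params.generate_private_key()
salt = os.urandom(16)

def derive(private_key, peer_public_key):
    shared = private_key.exchange(peer_public_key)
    key = HKDF(
        algorithm=hashes.SHA256(), length=32, salt=salt, info=b"handshake data"
    ).derive(shared)
    return base64.urlsafe_b64encode(key)

# Both sides derive the same Fernet key and can decrypt each other's tokens
assert derive(alice, bob.public_key()) == derive(bob, alice.public_key())
token = Fernet(derive(alice, bob.public_key())).encrypt(b"hello piermesh")
print(Fernet(derive(bob, alice.public_key())).decrypt(token))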

View File

@ -1,454 +0,0 @@
import os
import json
import logging
import msgpack
import random
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
# TODO: delete
# TODO: propagate json changes to msgpack automatically
# TODO: propagate msgpack changes to cache automatically
# TODO: Indexing
def _json_to_msg(path: str):
"""
Convert json at the path plus .json to a msgpack binary
Parameters
----------
path: str
Path to json minus the extension
"""
rpath = path + ".json"
res = b""
with open(rpath) as f:
res = msgpack.dumps(json.load(f))
with open(path, "wb") as f:
f.write(res)
class Daisy:
"""
Base class for Daisy data representation
`🔗 Source <https://git.utopic.work/PierMesh/piermesh/src/branch/main/Components/daisy.py>`_
Attributes
----------
filepath: str
Path to file representation on disk
msg: dict
In memory representation
"""
def __init__(
self,
filepath: str,
templates: dict = {},
template: bool = False,
prefillDict: bool = False,
):
"""
Parameters
----------
filepath: str
Path to disk location
templates: dict
Dictionary of templates to use
template: bool
Which template to use
prefillDict: bool
Whether to fill the record with a template
"""
self.filepath = filepath
if os.path.exists(filepath) != True:
with open(filepath, "wb") as f:
if template != False:
if template in templates.keys():
t = templates[template].get()
if prefillDict != False:
for k in prefillDict.keys():
t[k] = prefillDict[k]
f.write(msgpack.dumps(t))
self.msg = t
else:
print("No such template as: " + template)
else:
f.write(msgpack.dumps({}))
self.msg = {}
elif os.path.isdir(filepath):
self.msg = "directory"
else:
with open(filepath, "rb") as f:
self.msg = msgpack.loads(f.read())
# Use override for updating
def write(
self,
override=False,
encrypt: bool = False,
encryptKey=None,
recur: bool = False,
):
"""
Write record to disk
Parameters
----------
override
Either false or a dictionary of values to set on the record
encrypt: bool
Whether to encrypt the record (TODO)
encryptKey
Key to encrypt record with, or None if not set
recur: bool
Whether to recursively handle keys
"""
if override != False:
for key in override.keys():
# TODO: Deeper recursion
if recur:
if not key in self.msg.keys():
self.msg[key] = {}
for ikey in override[key].keys():
self.msg[key][ikey] = override[key][ikey]
else:
self.msg[key] = override[key]
data = msgpack.dumps(self.msg)
with open(self.filepath, "wb") as f:
f.write(data)
# Use for refreshing
def read(self, decrypt: bool = False, decryptKey=False):
"""
Read record from disk to memory
Parameters
----------
decrypt: bool
Whether to decrypt record
decryptKey
Key to decrypt record
"""
if os.path.isdir(self.filepath):
self.msg = "directory"
else:
with open(self.filepath, "rb") as f:
self.msg = msgpack.loads(f.read())
def get(self):
"""
Get record dictionary from memory
Returns
-------
self.msg: dict
"""
return self.msg
def sublist(self):
"""
Lists contents of directory if object is a directory, otherwise return None
"""
fpath = self.filepath
if os.path.isdir(fpath):
return ["messages/" + x for x in os.listdir(fpath)]
else:
return None
def loadTemplates(templatePath: str = "templates"):
"""Load templates for prefilling records
Parameters
----------
templatePath: str
Path to templates
"""
templates = {}
for p in os.listdir(templatePath):
p = templatePath + "/" + p
if os.path.isdir(p):
for ip in os.listdir(p):
ip = p + "/" + ip
if os.path.isdir(ip):
print("Too deep, skipping: " + ip)
else:
templates[ip] = Daisy(ip)
else:
templates[p] = Daisy(p)
self.templates = templates
return templates
class CFSHandler(FileSystemEventHandler):
"""
File system watchdog that propagates disk changes to records to their proper cache
"""
def __init__(self, cache, isCatch: bool = False):
"""
Parameters
----------
cache: Cache
Daisy cache to update
isCatch: bool
Is the cache for catchs
"""
self.cache = cache
self.isCatch = isCatch
super().__init__()
def on_any_event(self, event):
"""
Called when a CRUD operation is performed on a record file
Parameters
----------
event
Event object provided by watchdog
"""
if not (".json" in event.src_path):
if not (".md" in event.src_path):
tpath = "/".join(event.src_path.split("/")[1:])
if tpath != "":
if self.isCatch:
self.cache.sget(tpath)
else:
self.cache.get(tpath).get()
# TODO: Dumping to cacheFile
class Cache:
"""
In memory collection of Daisy records
"""
def __init__(
self,
filepaths=None,
cacheFile=None,
path: str = "daisy",
walk: bool = False,
isCatch: bool = False,
):
"""
Parameters
----------
filepaths
Either a list of filepaths to load or None
cacheFile
Path to a cache file which is a collection of paths to load
path: str
Path prefix to load records from
walk: bool
Whether to automatically walk the path and load records
isCatch: bool
Whether this cache is for catchs
"""
self.data = {}
self.path = path
self.event_handler = CFSHandler(self, isCatch=isCatch)
self.observer = Observer()
self.observer.schedule(self.event_handler, self.path, recursive=True)
self.observer.start()
# TODO: Test
if filepaths != None:
for fp in filepaths:
fp = path + "/" + fp
if os.path.isfile(fp):
self.data[fp] = Daisy(fp)
elif cacheFile != None:
with open(cacheFile, "r") as f:
for fp in f.read().split("\n"):
self.data[fp] = Daisy(fp)
elif walk:
for root, dirs, files in os.walk(self.path):
for p in dirs + files:
# print("walking")
if not (".json" in p):
if not (".md" in p):
tpath = root + "/" + p
# print(p)
# print(tpath)
self.data[tpath] = Daisy(tpath)
def create(self, path: str, data: dict):
"""
Create new record
Parameters
----------
path: str
Path to create record at
data: dict
Data to populate record with
"""
with open(self.path + "/" + path, "wb") as f:
f.write(msgpack.dumps(data))
logging.log(10, "Done creating record")
self.data[path] = Daisy(self.path + "/" + path)
logging.log(10, "Done loading to Daisy")
return self.data[path]
def get(self, path: str):
"""
Get record at path, else return False
path: str
Path of record
"""
if path in self.data.keys():
return self.data[path]
else:
if os.path.exists(self.path + "/" + path):
self.data[path] = Daisy(self.path + "/" + path)
return self.data[path]
else:
logging.log(10, "File does not exist")
return False
def refresh(self):
"""
Reload from disk to memory
"""
for key in self.data.keys():
self.data[key].read()
def search(self, keydict: dict, strict: bool = True):
"""
Search cache for record for records with values
keydict: dict
Values to search for
strict: bool
Whether to require values match
"""
results = []
for key, val in self.data.items():
val = val.get()
if strict and type(val) != str:
addcheck = False
for k, v in keydict.items():
if k in val.keys():
if v in val[k]:
addcheck = True
else:
addcheck = False
break
if addcheck:
results.append([key, val])
elif type(val) != str:
for k, v in keydict.items():
if k in val.keys():
if v in val[k]:
results.append([key, val])
return results
class Catch(Cache):
"""
Sub class of Cache for handling catchs
.. image:: https://git.utopic.work/PierMesh/piermesh/raw/branch/main/imgs/catchdisplay.png
"""
catches = {}
def __init__(
self, path: str = "catch", filepaths=None, catchFile=None, walk: bool = False
):
"""
Basically the same initialization parameters as Cache
"""
super().__init__(
filepaths=filepaths, cacheFile=catchFile, path=path, walk=walk, isCatch=True
)
# TODO: Fins
def sget(self, path: str):
"""
Call Cache's get to get record
"""
return super().get(path)
def get(self, head: str, tail: str, fins=None):
"""
Get catch by pieces
Parameters
----------
head: str
First part of catch (maximum: 4 characters)
tail: str
Second part of catch (maximum: 16 characters)
fins
List of (maximum 8 characters) strings at the end of the catch or None if none
"""
r = self.search({"head": head, "tail": tail})
return r[0][1]["html"]
def addc(self, peer, node, seperator, head, tail, data, fins=None):
tnpath = "catch/" + node
if os.path.exists(tnpath) != True:
os.makedirs(tnpath)
tppath = tnpath + "/" + peer
if os.path.exists(tppath) != True:
os.makedirs(tppath)
sid = str(random.randrange(0, 999999)).zfill(6)
data["seperator"] = seperator
data["head"] = head
data["tail"] = tail
if fins != None:
data["fins"] = fins
res = self.create("{0}/{1}/{2}".format(node, peer, sid), data)
return [sid, res]
class Store(Daisy):
def __init__(self, store: str, path: str, nodeNickname: str):
fpath = "daisy/{0}/{1}".format(path, nodeNickname)
cpath = "{0}/{1}/{2}".format(path, nodeNickname, store)
if not os.path.exists(fpath):
os.mkdir(fpath)
super().__init__("daisy/" + cpath)
def update(self, entry: str, data, recur: bool=True):
if recur:
for key in data.keys():
self.msg[entry][key] = data[key]
else:
self.msg[entry] = data
self.write()
def getRecord(self, key: str):
if key in self.get().keys():
return self.get()[key]
else:
self.cLog(20, "Record does not exist")
return False
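The Daisy base class removed above mirrors a dict to disk as msgpack and applies overrides on write. A standalone sketch of that record pattern (not the original class, just its core behavior):
import os
import msgpack

class RecordSketch:
    def __init__(self, filepath: str):
        self.filepath = filepath
        if os.path.exists(filepath):
            with open(filepath, "rb") as f:
                self.msg = msgpack.loads(f.read())
        else:
            self.msg = {}
            self.write()

    def write(self, override=None):
        # Apply an override dict to the in-memory record, then persist it
        if override:
            self.msg.update(override)
        with open(self.filepath, "wb") as f:
            f.write(msgpack.dumps(self.msg))

record = RecordSketch("demo.msgpack")
record.write({"head": "test", "tail": "record"})
print(RecordSketch("demo.msgpack").msg)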

View File

@ -1,66 +0,0 @@
import base64, uuid, json, sys, lzma, bson, random, msgpack
from Packets import Packet, HeaderPacket
def compressPackets(packets):
cpackets = []
for packet in packets:
cpacket = lzma.compress(packet)
cpackets.append(cpacket)
return cpackets
def decompressPackets(packets):
cpackets = []
for packet in packets:
cpacket = lzma.decompress(packet)
cpackets.append(cpacket)
return cpackets
# TODO: Sub packets
# Sub packets are indicated by a flag in the header
# TODO: Sub packets implementation
# TODO: Sub packet recursion, collapse
# TODO: Determine how many sub packets are allowed per header packet size (done: 5), but user ids and sub packet ids must be 6 digit integers
# Remove duplicate references to objects
# Local db that stores users to lookup for less metadata, daisy
# IDS MUST BE 6 DIGITS
# location prefix added by node
# Check if packet is header by checking if it has sender_id
# DO NOT CHANGE DATA SIZE UNLESS YOU KNOW WHAT YOU'RE DOING
# Moved to Packets/Packets
def reassemblePackets(packets):
#print(packets)
packet_count = msgpack.loads(packets[0])["packet_count"]
#print("Packet count")
#print(packet_count)
positions = []
for packet in packets:
p = msgpack.loads(packet)
num = 0
if "packet_number" in p:
num = p["packet_number"]
#print(p)
positions.append(num)
tpackets = []
for it in range(0, len(positions)):
tpackets.append(packets[positions.index(it)])
packets = tpackets
res = b""
#print("Reassembling")
#print(len(packets))
for it in range(len(packets)):
if it > 0:
#print(it)
#print(res)
#print(bson.loads(packets[it]).keys())
res = res + lzma.decompress(msgpack.loads(packets[it])["data"])
#print(len(res))
#print(bson.loads(res))
#print(res)
#print(bson.loads(res))
return msgpack.loads(res)
def raiseSizeError(index):
raise ValueError("Field of index: " + str(index) + " too big, maximum is 200 bytes")
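The helpers removed above split a msgpack payload into numbered packets, lzma-compress each data field, and reassemble by packet number. A standalone round-trip sketch of that flow:
# Standalone sketch (not part of this commit): packet 0 is a header carrying
# packet_count, the rest carry lzma-compressed chunks tagged with packet_number.
import lzma
import msgpack

payload = msgpack.dumps({"html": "<h1>Hello world!</h1>" * 50})
chunk_size = 200
chunks = [payload[i:i + chunk_size] for i in range(0, len(payload), chunk_size)]

packets = [msgpack.dumps({"packet_count": len(chunks)})]
packets += [
    msgpack.dumps({"packet_number": n, "data": lzma.compress(c)})
    for n, c in enumerate(chunks)
]

# Reassemble in packet_number order, skipping the header packet
body = b"".join(
    lzma.decompress(msgpack.loads(p)["data"])
    for p in sorted(packets[1:], key=lambda p: msgpack.loads(p)["packet_number"])
)
assert msgpack.loads(body) == msgpack.loads(payload)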

View File

@ -1,37 +0,0 @@
import msgpack
from Siph.map import Network
from Components.daisy import Catch
from Components.daisy import Cache
import random
# TODO: Move initialization to run, this class is unnecessary
class Router:
"""
Router
"""
def __init__(self, cLog, nfpath="server.info"):
self.cLog = cLog
# TODO: Better network init
self.network = Network()
self.catch = Catch(walk=True)
self.cache = Cache(walk=True)
self.cLog(10, "Loading server info")
self.serverInfo = self.cache.get(nfpath)
if self.serverInfo == False:
self.cache.create(nfpath, {"nodeID": random.randrange(0, 1000000)})
self.serverInfo = self.cache.get(nfpath)
self.network.addin(self.serverInfo.get()["nodeID"])
def getRoute(self, headerPacket):
headerPacket = msgpack.loads(headerPacket)
peer = headerPacket["recipient"]
node = headerPacket["node"]
def getCatch(self, head, tail, fins=None):
return self.catch.get(head, tail, fins=fins)
def addc(self, peer, node, seperator, head, tail, data, fins=None):
self.catch.addc(peer, node, seperator, head, tail, data, fins=fins)

View File

@ -1,30 +0,0 @@
import socketio
import time
from threading import Thread
import transmission as t
import microplane as m
import bson
@sio.event
def send(sid, data):
packets = bson.dumps(data)
packets = m.getPackets(packets)
# TODO: Implement awaitFullResponse
threads = []
threads.append(Thread(target=t.awaitFullResponse, args=[pid]))
for p in packets:
t.send(interface, p)
#awaitResponse(cpid)
pid = t.cpid
threads.append(Thread(target=t.awaitResponse, args=[pid]))
threads[-1].start()
time.sleep(1)
done = True
while True:
for th in threads:
if th.is_alive():
done = False
if done:
break
sio.emit('done', {'data': t.messages[packets[0]["packets_id"]]["fresponse"]}, room=sid)
mgr = socketio.RedisManager('redis://')
sio = socketio.Server(client_manager=mgr)

View File

@ -1,193 +0,0 @@
import meshtastic
import meshtastic.serial_interface
from pubsub import pub
import bson
import microplane as m
import sys
import time
import asyncio
import functools
from util import sendData
from threading import Thread
import webview
from Daisy import Catch, Cache
from Filters import Base
tcache = Cache()
tcatch = Catch()
html = False
notConnected = True
messages = {}
acks = {}
# Be careful with this
cpid = 0
# TODO: Filter out non data packets/log them, DONE
# TODO: Figure out why the message count is resetting, DONE, the while loop was...looping
# TODO: Sending packets across multiple nodes/load balancing/distributed packet transmission/reception
def onReceive(packet, interface):
Base.sieve(packet)
tcache.refresh()
def onConnection(interface, topic=pub.AUTO_TOPIC): # called when we (re)connect to the radio
# defaults to broadcast, specify a destination ID if you wish
interface.sendData("connect".encode("utf-8"))
global notConnected
notConnected = False
def responseCheck(packet):
#print("got response")
#print("Checking")
#print(packet["decoded"])
rid = packet["decoded"]["requestId"]
print(rid)
# TODO: Set to false on error
print(packet["decoded"])
if (packet["decoded"]["routing"]["errorReason"] == "MAX_RETRANSMIT"):
print("Got ack error")
acks[str(rid)] = False
else:
acks[str(rid)] = True
# TODO: Threaded send method
def send(interface, packet):
global cpid
# TODO: Set to confirm receipt, DONE
# TODO: Async sendData call
# TODO: Fix logging error
print("sending")
pid = interface.sendData(packet, wantAck=True, onResponse=responseCheck)
#pid = await sendData(interface, packet, wantAck=True, onResponse=responseCheck)
#pid = await iloop.run_in_executor(None, functools.partial(interface.sendData, interface, packet, wantAck=True, onResponse=responseCheck))
# Can I use waitForAckNak on cpid?
cpid = pid.id
print(cpid)
#return pid
return True
def awaitResponse(pid):
#pid = interface.sendData(p, wantAck=True, onResponse=responseCheck)["id"]
#pid = await loop.run_in_executor(None, send, interface, p)
#pid = await loop.run_in_executor(None, functools.partial(interface.sendData, wantAck=True, onResponse=responseCheck), interface, p)
print(pid)
for i in range(1_000_000_000):
time.sleep(5)
if str(pid) in acks:
print("Response gotten")
break
print("waiting")
return True
def awaitFullResponse(pid):
for i in range(1_000_000_000):
time.sleep(5)
if pid in messages.keys():
if messages[pid]["finished"]:
print("Response gotten")
break
print("waiting")
return True
pub.subscribe(onReceive, "meshtastic.receive")
pub.subscribe(onConnection, "meshtastic.connection.established")
# By default will try to find a meshtastic device, otherwise provide a device path like /dev/ttyUSB0
interface = meshtastic.serial_interface.SerialInterface(sys.argv[-1])
# Wait to connect to partner
# TODO: use node id to deliver directly
while notConnected:
time.sleep(5)
print("Waiting")
if "0" in sys.argv[-1]:
tj = [[{"message":"free palestine! free all colonized people!"}], ["the people yearn for freedom"]]
j2 = {"message":"free palestine! free all colonized people!", "message2":"free palestine! free all colonized people!"}
htmlj = {"html": "<h1>Hello world!</h1>"}
htmljl = {"html": ""}
with open("test.html", "r") as f:
htmljl["html"] = f.read()
done = False
threads = {}
for p in m.getPackets(bson.dumps(htmljl), 600123, 600123, 600124):
print(sys.getsizeof(p))
#send_thread = Thread(target=send, args=[interface, p])
#send_thread.start()
send(interface, p)
#awaitResponse(cpid)
await_thread = Thread(target=awaitResponse, args=[cpid])
await_thread.start()
cth = {
"ob": await_thread,
"pid": str(cpid),
"packet": p,
"retry": False
}
threads[str(cpid)] = cth
# TODO: see if theres a better way to do this
time.sleep(10)
#await_thread.join()
#await_thread.join()
#loop = asyncio.new_event_loop()
#loopi = asyncio.new_event_loop()
#loopi.run_until_complete(send(interface, p))
#res = loop.run_until_complete(awaitResponse(cpid))
# figure out why it doesnt send before timing out
# TODO: running in different threads
#pid = send(interface, p).id
#loop = asyncio.new_event_loop()
#loopi = asyncio.new_event_loop()
#loopi.run_until_complete(send(loopi, interface, p))
#interface.waitForAckNak()
#res = loop.run_until_complete(awaitResponse(interface, p, cpid))
#print("Done waiting")
#interface.waitForAckNak()
# DO NOT RUN UNTIL responseCheck CHECKS FOR ERRORS
isDone = False
while not isDone:
doneFlag = True
for th in threads.keys():
th = threads[th]
if not th["ob"].is_alive:
if not acks[th["pid"]]:
retry = th["retry"]
if retry == False:
retry = 1
elif retry < 3:
retry += 1
else:
print("Too many retries")
break
doneFlag = False
send(interface, th["packet"])
await_thread = Thread(target=awaitResponse, args=[cpid])
await_thread.start()
cth = {
"ob": await_thread,
"pid": str(cpid),
"packet": p
}
cth["retry"] = retry
threads[str(cpid)] = cth
# TODO: see if theres a better way to do this
time.sleep(5)
if doneFlag:
isDone = True
for it, p in enumerate(m.getPacketsFromFile("r.jpg", 600123, 600123, 600124)):
#send(interface, p)
#pid = send(interface, p).id
#loopi = asyncio.new_event_loop()
#loopi.run_until_complete(send(loopi, interface, p))
#interface.waitForAckNak()
#res = loop.run_until_complete(awaitResponse(interface, p, cpid))
#interface.waitForAckNak()
#print("Sending packet: " + str(it))
break
else:
while True:
if html != False:
break
pass
webview.create_window('Home', html=html)
webview.start()
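The deleted test script above sends each packet, waits on an ack flag set by a response callback, and retries a bounded number of times. A standalone sketch of that send/await-ack/retry pattern with stand-ins for the radio interface:
# Standalone sketch (not part of this commit). transport_send and the acks
# dict are stand-ins, not a real meshtastic API.
import time

acks = {}

def transport_send(packet_id, packet):
    # Stand-in for interface.sendData(...); the real onResponse callback
    # would set acks[packet_id] when the radio reports delivery.
    acks[packet_id] = True

def send_with_retries(packet_id, packet, retries=3, wait_seconds=5):
    for attempt in range(retries):
        transport_send(packet_id, packet)
        for _ in range(wait_seconds):
            if acks.get(packet_id):
                return True
            time.sleep(1)
    return False

print(send_with_retries("000001", b"payload"))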