Skip to content

Commit

Permalink
Wdym impulsive? (pt 2)
Browse files Browse the repository at this point in the history
Shit works!
  • Loading branch information
strawmelonjuice committed Jul 30, 2024
1 parent 12ac1c5 commit 0ef7f58
Show file tree
Hide file tree
Showing 13 changed files with 458 additions and 114 deletions.
2 changes: 1 addition & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ license = "AGPL-3.0-only"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[[bin]]
name = "cynthiaweb"
path = "./Main/src/main.rs"
path = "./source/Main/src/main.rs"


[dependencies]
Expand Down
28 changes: 27 additions & 1 deletion package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

8 changes: 6 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,12 @@
"typescript": "^5.0.0",
"postcss-cli": "^11.0.0",
"@biomejs/biome": "latest",
"@types/node": "latest",
"clean-css-cli": "4"
},
"dependencies": {
"cynthia-plugin-api": "file:source/Plugin-runners/node-plugin-api/"
},
"scripts": {
"build": "npm run build:deps && npm run build:rs",
"build:deps": "npm run build:css && npm run build:jsts && npm run minify",
Expand All @@ -23,6 +27,6 @@
"minify-disabled": "npm run minify:js && npm run minify:css",
"minify:js": "npx terser src-frontend/js/prefetch.js > ./target/generated/js/prefetch.min.js && npx terser src-frontend/js/login.js > ./target/generated/js/login.min.js && npx terser src-frontend/js/login.js > ./target/generated/js/login.min.js && npx terser src-frontend/js/site-home.js > ./target/generated/js/site-home.min.js && npx terser src-frontend/js/site-index.js > ./target/generated/js/site-index.min.js && npx terser src-frontend/js/signup.js > ./target/generated/js/signup.min.js",
"minify:css": "npx cleancss -O1 specialComments:all --inline none ./target/generated/css/main.css > ./target/generated/css/main.min.css",
"start": "cargo run"
"start": "npm run build && cargo run"
}
}
}
12 changes: 6 additions & 6 deletions source/Main/src/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ pub struct CynthiaConfClone {
pub generator: Generator,
pub logs: Option<Logging>,
pub scenes: SceneCollection,
pub runtimes: Runtimes
pub runtimes: Runtimes,
}

impl CynthiaConfig for CynthiaConfClone {
Expand All @@ -99,7 +99,7 @@ impl CynthiaConfig for CynthiaConfClone {
generator: self.generator.clone(),
logs: self.logs.clone(),
scenes: self.scenes.clone(),
runtimes: self.runtimes.clone()
runtimes: self.runtimes.clone(),
}
}
fn clone(&self) -> CynthiaConfClone {
Expand All @@ -110,7 +110,7 @@ impl CynthiaConfig for CynthiaConfClone {
generator: self.generator.clone(),
logs: self.logs.clone(),
scenes: self.scenes.clone(),
runtimes: self.runtimes.clone()
runtimes: self.runtimes.clone(),
}
}
}
Expand All @@ -123,7 +123,7 @@ impl CynthiaConfig for CynthiaConf {
generator: self.generator.clone(),
logs: self.logs.clone(),
scenes: self.scenes.clone(),
runtimes: self.runtimes.clone()
runtimes: self.runtimes.clone(),
}
}
fn clone(&self) -> CynthiaConfClone {
Expand All @@ -134,7 +134,7 @@ impl CynthiaConfig for CynthiaConf {
generator: self.generator.clone(),
logs: self.logs.clone(),
scenes: self.scenes.clone(),
runtimes: self.runtimes.clone()
runtimes: self.runtimes.clone(),
}
}
}
Expand All @@ -152,7 +152,7 @@ impl CynthiaConf {
generator: self.generator.clone(),
logs: self.logs.clone(),
scenes: self.scenes.clone(),
runtimes: self.runtimes.clone(),
runtimes: self.runtimes.clone(),
}
}
}
Expand Down
185 changes: 157 additions & 28 deletions source/Main/src/externalpluginservers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,33 +10,53 @@
// This module will be a testing ground. V2 was unreliable and had a lot of issues, especially because it didn't keep the servers attached. It just let them run.
// This module will be a testing ground for a new system that will be more reliable and more secure.
// More specifically: The plugins will attach to js again, but inside of a controlled environment.

#[derive(Serialize, Deserialize, Debug)]
pub(crate) struct EPSRequest {
pub(crate) id: u64,
pub(crate) command: String,
id: EPSCommunicationsID,
pub(crate) body: EPSRequestBody,
}
// Payload of a request sent from the Rust server to the external (JS) plugin
// runtime. Serialized as JSON; the serde `tag = "for"` attribute writes the
// variant name into a `"for"` field so the JS side can dispatch on it.
#[derive(Serialize, Deserialize, Debug)]
#[serde(tag = "for")]
pub(crate) enum EPSRequestBody {
    // Ask the plugin runtime to shut down.
    Close,
    // Round-trip test message; `test` is presumably echoed back by the JS
    // side for diagnostics — TODO(review): confirm against the JS handler.
    Test { test: String },
}

#[derive(Serialize, Deserialize, Debug, Clone)]
pub(crate) struct EPSResponse {
pub(crate) id: u64,
id: EPSCommunicationsID,
pub(crate) body: EPSResponseBody,
}
#[derive(Serialize, Deserialize, Debug)]
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(tag = "as")]
pub(crate) enum EPSResponseBody {
NoneOk,
Json(String),
// We'll add more types later, these will be very specific to the calls made by the server.
OkString { value: String },
Json { value: String },
Error { message: Option<String> },
}

use crate::config::CynthiaConfig;
use crate::files::tempfolder;
use crate::{EPSCommunicationsID, ServerContext};
use actix_web::web::Data;
use futures::FutureExt;

Check warning

Code scanning / clippy

unused import: futures::FutureExt Warning

unused import: futures::FutureExt
use interactive_process::InteractiveProcess;
use log::debug;
use serde::{Deserialize, Serialize};
use serde_json::from_str;
use std::process::Command;
use std::sync::{Arc};
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use tokio::sync::mpsc::Receiver;
use crate::files::tempfolder;
use crate::ServerContext;
use tokio::sync::Mutex;

pub(crate) async fn main(
_server_context_mutex: Arc<Mutex<ServerContext>>,
mut eps_r: Receiver<EPSRequest>,
) {
let rt = tokio::runtime::Runtime::new().unwrap();

pub(crate) async fn main(_server_context_mutex: Arc<tokio::sync::Mutex<ServerContext>>, mut eps_r: Receiver<EPSRequest>) {
let config_clone = {
// We need to clone the config because we can't hold the lock while we're in the tokio runtime.
let server_context = _server_context_mutex.lock().await;
Expand All @@ -45,7 +65,7 @@ pub(crate) async fn main(_server_context_mutex: Arc<tokio::sync::Mutex<ServerCon
// We gotta write the javascript to a temporary file and then run it.
let jstempfolder = tempfolder().join("js");
std::fs::create_dir_all(&jstempfolder).unwrap();
let jsfile = include_bytes!("../target/generated/js/main.js");
let jsfile = include_bytes!("../../../target/generated/js/main.js");

Check failure

Code scanning / clippy

couldn't read source/Main/src/../../../target/generated/js/main.js: No such file or directory (os error 2) Error

couldn't read source/Main/src/../../../target/generated/js/main.js: No such file or directory (os error 2)
std::fs::write(jstempfolder.join("main.js"), jsfile).unwrap();
// now we can run the javascript
let node_runtime: &str = config_clone.runtimes.node.as_ref();
Expand All @@ -56,25 +76,29 @@ pub(crate) async fn main(_server_context_mutex: Arc<tokio::sync::Mutex<ServerCon
let y = p.clone();
match line {
Ok(o) => {
if o.starts_with("send: ") {
let l = o.split("send: ").collect::<Vec<&str>>()[1];
if o.starts_with("parse: ") {
let l = o.split("parse: ").collect::<Vec<&str>>()[1];
let mut z = y.lock().unwrap();
z.push_str(l);
debug!("JsPluginRuntime is now parsing `{l}` of `{z}`");
let q = from_str::<EPSResponse>(z.as_str());
match q {
Ok(o) => {
println!("{:#?}", o);
debug!("JsPluginRuntime parsed a response: {:?}", o);
rt.spawn(and_now(o, _server_context_mutex.clone()));
z.clear();
}
_ => {}
}
} else {
if o.replace("\n", "").is_empty() {
// Just wait for the next line lol
// Just wait for the next line lol
} else {
let mut z = y.lock().unwrap();
z.clear();
println!("{}", o);
config_clone
.clone()
.tell(format!("[JsPluginRuntime]: {}", o));
}
}

Check warning

Code scanning / clippy

this else { if .. } block can be collapsed Warning

this else { if .. } block can be collapsed
}
Expand All @@ -84,20 +108,125 @@ pub(crate) async fn main(_server_context_mutex: Arc<tokio::sync::Mutex<ServerCon
.unwrap();
loop {
match eps_r.recv().await {
Some(r) => {
match r.command.as_str() {
"close" => {
proc.send("close").unwrap()
}
_ => {}
}
Some(o) => {
let mut s = String::from("parse: ");
s.push_str(serde_json::to_string(&o).unwrap().as_str());
debug!("Sending to JsPluginRuntime: `{}`", s);
proc.send(s.as_str()).unwrap();
}
_ => {}
}

}
}

fn contact_eps() {
todo!()
}
/// Hands a parsed plugin-runtime response over to the shared server context.
///
/// Pushes `res` (wrapped in `Some`) onto the external plugin server's
/// response queue, where `contact_eps` polls for it by matching `id`.
async fn and_now(res: EPSResponse, _server_context_mutex: Arc<Mutex<ServerContext>>) {
    _server_context_mutex
        .lock()
        .await
        .external_plugin_server
        .response_queue
        .push(Some(res));
    debug!("Added response to external plugin server queue.");
}

/// Sends `req` to the external plugin server and blocks (async) until the
/// matching response arrives.
///
/// Protocol: a fresh random `EPSCommunicationsID` is claimed, the request is
/// sent over the mpsc channel consumed by `externalpluginservers::main`, and
/// the shared response queue is then polled on a ~60µs interval until an
/// `EPSResponse` with the same `id` shows up, which is removed and returned.
///
/// # Panics
/// Panics if the mpsc send fails, or if a foreign id is found in the queue
/// while this is the only outstanding request (data corruption guard).
pub(crate) async fn contact_eps(
    _server_context_mutex: Data<Arc<Mutex<ServerContext>>>,
    req: EPSRequestBody,
) -> EPSResponse {
    let random_id = {
        let mut d: EPSCommunicationsID;
        loop {
            d = rand::random::<EPSCommunicationsID>();
            // Verify that this number is not already in the vector of unreturned responses.
            // NOTE(review): the comment says "unreturned responses" but the code
            // scans `response_queue`, not `unreturned_ids` — ids claimed by other
            // in-flight requests that have not yet been answered would pass this
            // check. TODO confirm whether `unreturned_ids` should be scanned too.
            let mut server_context = _server_context_mutex.lock().await;
            if !server_context
                .external_plugin_server
                .response_queue
                .iter()
                .any(|o| match o {
                    Some(a) => a.id == d,
                    None => false,
                })
            {
                // It's unique! Now add it to the vector to claim it.
                server_context.external_plugin_server.unreturned_ids.push(d);
                break
            } else {
                continue;
            };
        }
        d
    };

    // Clone the sender under a short-lived lock so the lock is not held
    // across the `.send(...).await` below.
    let eps_r = {
        let server_context = _server_context_mutex.lock().await;
        server_context.external_plugin_server.sender.clone()
    };
    match eps_r
        .send(EPSRequest {
            id: random_id,
            body: req,
        })
        .await
    {
        Ok(_) => {
            debug!("Sent request to external plugin server.");
        }
        _ => {
            // A failed send means the runtime loop is gone; nothing sensible
            // to return, so abort.
            panic!("Failed to send request to external plugin server.");
        }
    };
    // This function sends a request over mpsc to the externalpluginservers::main function, then periodically locks the server mutex and checks if a corresponding response (matched by `id`) is added, if not, it will try again.
    // After sending, check for received responses.
    let mut wait = tokio::time::interval(tokio::time::Duration::from_micros(60));
    loop {
        wait.tick().await;
        {
            // Lock the server context mutex and check if the response is in the queue.
            let mut server_context = _server_context_mutex.lock().await;
            // Remove every none value from server_context.external_plugin_server.response_queue
            // (slots left behind by `o.take()` in earlier iterations/threads).
            server_context
                .external_plugin_server
                .response_queue
                .retain(|o| match o {
                    Some(_) => true,
                    None => false,
                });

            // Number of requests still awaiting a response — used below to
            // detect a corrupted queue.
            let left_threads = server_context.external_plugin_server.unreturned_ids.len();
            for o in server_context
                .external_plugin_server
                .response_queue
                .iter_mut()
            {
                if let Some(a) = o {
                    debug!("[EPSQuechecker]: Checking response from external plugin server queue: {:?}", a);
                    if a.id == random_id {
                        // Match! Return the response and remove it from the vector.
                        drop(wait);
                        // Remove it from the unreturned vec
                        let p = o.take().unwrap();
                        // NOTE(review): `server_context` is dropped and re-acquired
                        // here before mutating `unreturned_ids`; verify the guard
                        // is actually releasable while the `iter_mut` borrow from
                        // it is still in scope.
                        drop(server_context);
                        {
                            let mut server_context = _server_context_mutex.lock().await;
                            server_context.external_plugin_server.unreturned_ids.retain(
                                |a| a != &random_id
                            );
                            return p;
                        }
                    } else {
                        debug!(
                            "[EPSQuechecker]: No match. Continuing.\n\n\n\r{} <-- What we expected\n\r{} <-- What we got",
                            random_id, a.id
                        );
                        // No match! Another thread wants this. Keep it in the vector and continue.
                        // Unless there should be no other thread! Check for this by:
                        if left_threads <= 1 {
                            panic!("Incorrect data in the js queue. Might the ID's be altered by js's rounding?")
                        }
                    }
                };
            }
        }
    }
}
Loading

0 comments on commit 0ef7f58

Please sign in to comment.