scheduler api
commit c4e745ba2e (parent dbbc41705e)
7 changed files with 77 additions and 10 deletions
@@ -195,9 +195,10 @@ repeat
     drawui(command, args, backoff)

     local ret = nil
+    local err = nil

     if command then
-        ret = commands[command](args)
+        ret, err = commands[command](args)
     end

     if command == "Update" and ret == false then
@@ -212,6 +213,12 @@ repeat
             ret_table = "Success"
         else
             ret_table = "Failure"
+            term.setCursorPos(1,11)
+            term.clearLine()
+            term.setTextColor(colors.white)
+            term.write("error: ")
+            term.setTextColor(colors.red)
+            print(err)
         end
     elseif ret then
         ret_table = ret

@@ -1,6 +1,6 @@
 use std::sync::Arc;

-use log::warn;
+use log::{warn, info};
 use tokio::sync::{Mutex, OwnedMutexGuard};

 use crate::{blocks::Position, turtle::TurtleCommander};
@@ -27,7 +27,7 @@ impl Depots {
     }

     pub async fn dock(&self, turtle: TurtleCommander) -> Option<usize> {
-        let depot = self.nearest(turtle.pos().await).await?;
+        let depot = self.clone().nearest(turtle.pos().await).await?;
         turtle.goto(*depot).await?;

         // dump inventory
@@ -61,6 +61,11 @@ impl Depots {
         Some(turtle.fuel())
     }

+    pub async fn add(&self, pos: Position) {
+        info!("new depot at {pos:?}");
+        self.depots.lock().await.push(Arc::new(Mutex::new(pos)))
+    }
+
     pub fn from_vec(vec: Vec<Position>) -> Self {
         let mut depots = Vec::new();
         for depot in vec {
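
Taken together, `add` and the `dock` routine above make the depot list extensible at runtime. A minimal server-side sketch, assuming `Depots` is importable from a `crate::depot` module (the module path is not shown in this diff) and that a `Position` and a `TurtleCommander` are already in hand:

    use crate::blocks::Position;
    use crate::depot::Depots;           // module path assumed
    use crate::turtle::TurtleCommander;

    /// Hypothetical helper: register a freshly surveyed depot, then send a
    /// turtle to the nearest one. `dock` routes the turtle there, dumps its
    /// inventory, and (judging by the hunk above) returns its fuel level.
    async fn register_and_dock(
        depots: &Depots,
        pos: Position,
        turtle: TurtleCommander,
    ) -> Option<usize> {
        depots.add(pos).await;
        depots.dock(turtle).await
    }
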
@@ -22,16 +22,24 @@ pub async fn fell_tree(turtle: TurtleCommander, bottom: Vec3) -> Option<()> {
 const SWEEP_DELAY: usize = 16;

 #[derive(Serialize, Deserialize, Clone)]
-struct TreeFarm {
+pub struct TreeFarm {
     position: Vec3,
     size: Vec3,
+    last_sweep: OffsetDateTime,
 }

 impl TreeFarm {
+    pub fn new(position: Vec3) -> Self {
+        Self {
+            position,
+            size: Vec3::new(5,1,2),
+            last_sweep: OffsetDateTime::UNIX_EPOCH,
+        }
+    }
+
     pub async fn sweep(&self, turtle: TurtleCommander) -> Option<()> {
         let trees = self.size.x * self.size.y * self.size.z;
-        turtle.dock().await;
+        //turtle.dock().await;
         for tree in 0..trees {
             let index = fill(self.size, tree);
             let offset = index.component_mul(&Vec3::new(2, 32, 2));
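
`TreeFarm::new` is what the new HTTP layer relies on further down, where the `fell()` handler does `schedule.add_task(Box::new(TreeFarm::new(req)))`. A sketch of the same registration done directly against the scheduler, assuming `TreeFarm` implements the scheduler's `Task` trait (the `fell` handler below depends on this) and that `Scheduler` lives in a `crate::tasks` module; paths and coordinates here are illustrative, not from this diff:

    use blocks::Vec3;
    use crate::fell::TreeFarm;      // made `pub` by this commit
    use crate::tasks::Scheduler;    // module path assumed

    /// Hypothetical helper: queue a tree farm rooted at `corner`.
    /// TreeFarm::new fills in the default 5x1x2 size and a UNIX_EPOCH
    /// last_sweep, presumably so the first scheduler poll treats it as overdue.
    fn plant_tree_farm(scheduler: &mut Scheduler, corner: Vec3) {
        scheduler.add_task(Box::new(TreeFarm::new(corner)));
    }
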
@@ -115,7 +115,7 @@ async fn read_from_disk() -> anyhow::Result<LiveState> {

     let depots = match tokio::fs::OpenOptions::new()
         .read(true)
-        .open(SAVE.get().unwrap().join("turtles.json"))
+        .open(SAVE.get().unwrap().join("depots.json"))
         .await
     {
         tokio::io::Result::Ok(file) => serde_json::from_reader(file.into_std().await)?,
@@ -127,7 +127,7 @@ async fn read_from_disk() -> anyhow::Result<LiveState> {

     let scheduler = match tokio::fs::OpenOptions::new()
         .read(true)
-        .open(SAVE.get().unwrap().join("scheduler.json"))
+        .open(SAVE.get().unwrap().join("tasks.json"))
         .await
     {
         tokio::io::Result::Ok(file) => serde_json::from_reader(file.into_std().await)?,

@@ -1,6 +1,7 @@
 use std::sync::Arc;

 use erased_serde::serialize_trait_object;
+use log::info;
 use serde::{Deserialize, Serialize};
 use tokio::sync::{Mutex, MutexGuard, RwLock, OwnedMutexGuard};
 use tokio::task::JoinHandle;
@@ -44,6 +45,7 @@ impl Scheduler {
     }

     pub fn add_task(&mut self, task: Box<dyn Task>) {
+        info!("new task");
         self.tasks.push(task);
     }

@@ -357,7 +357,7 @@ pub(crate) async fn process_turtle_update(
     let info = TurtleInfo::from_update(update, turtle.name.clone(), turtle.position.clone());

     if let TurtleCommandResponse::Failure = info.ret {
-        warn!("{} command failure", turtle.name.to_str());
+        warn!("{}: command failure", turtle.name.to_str());
     }

     if let Some(send) = turtle.callback.take() {
@@ -385,7 +385,7 @@ pub(crate) async fn process_turtle_update(
 }

 #[derive(Serialize, Deserialize, Clone, Debug)]
-pub(crate) enum TurtleCommand {
+pub enum TurtleCommand {
     Wait(u32),
     Forward(u32),
     Backward(u32),

@@ -1,5 +1,6 @@
 use tokio;
 use blocks::Vec3;
+use crate::fell::TreeFarm;
 use crate::turtle::TurtleCommandResponse;
 use crate::turtle::TurtleCommander;
 use crate::turtle::TurtleInfo;
@@ -35,7 +36,11 @@ pub fn turtle_api() -> Router<SharedControl> {
         .route("/:id/dig", post(dig))
         .route("/:id/cancelTask", post(cancel))
         .route("/:id/manual", post(run_command))
+        .route("/:id/dock", post(dock))
         .route("/:id/info", get(turtle_info))
+        .route("/createTreeFarm", post(fell))
+        .route("/registerDepot", post(new_depot))
+        .route("/pollScheduler", get(poll))
         .route("/updateAll", get(update_turtles))
 }

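
These routes only become reachable once `turtle_api()` is nested into the application's top-level router; that wiring is not part of this commit, so the mount point below is an assumption, shown only to make the new paths concrete:

    use axum::Router;

    // Hypothetical composition: with a "/turtle" prefix the new endpoints become
    // e.g. POST /turtle/createTreeFarm, POST /turtle/registerDepot,
    // GET /turtle/pollScheduler and POST /turtle/7/dock.
    fn app(state: SharedControl) -> Router {
        Router::new()
            .nest("/turtle", turtle_api())
            .with_state(state)
    }
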
@@ -73,6 +78,16 @@ pub(crate) async fn place_up(
     Json(response)
 }

+pub(crate) async fn dock(
+    Path(id): Path<u32>,
+    State(state): State<SharedControl>,
+) -> Json<usize> {
+    let state = state.read().await;
+    let commander = state.get_turtle(id).await.unwrap().clone();
+    drop(state);
+    Json(commander.dock().await.unwrap())
+}
+
 pub(crate) async fn run_command(
     Path(id): Path<u32>,
     State(state): State<SharedControl>,
@@ -102,12 +117,42 @@ pub(crate) async fn dig(
     "ACK"
 }

+pub(crate) async fn new_depot(
+    State(state): State<SharedControl>,
+    Json(req): Json<Position>,
+) -> &'static str {
+    let depots = &state.read().await.depots;
+    depots.add(req).await;
+
+    "ACK"
+}
+
+pub(crate) async fn poll(
+    State(state): State<SharedControl>,
+) -> &'static str {
+    let schedule = &mut state.write().await.tasks;
+    schedule.poll().await;
+
+    "ACK"
+}
+
+pub(crate) async fn fell(
+    State(state): State<SharedControl>,
+    Json(req): Json<Vec3>,
+) -> &'static str {
+    let schedule = &mut state.write().await.tasks;
+    schedule.add_task(Box::new(TreeFarm::new(req)));
+
+    "ACK"
+}
+
 pub(crate) async fn set_goal(
     Path(id): Path<u32>,
     State(state): State<SharedControl>,
     Json(req): Json<Position>,
 ) -> &'static str {
-    let turtle = state.read().await.get_turtle(id).await.unwrap();
+    let turtle = state.read().await.get_turtle(id).await.unwrap().clone();
     drop(state);
     tokio::spawn(async move {turtle.goto(req).await.expect("route failed")});

     "ACK"
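
End to end, the scheduler API added here can be driven by any HTTP client. Below is a hedged client sketch using `reqwest` (not a dependency shown in this commit); the base URL, the `/turtle` prefix, and the JSON encoding of `Vec3` as `[x, y, z]` are all assumptions, since neither the router mount point nor the serde representation appears in this diff:

    use serde_json::json;

    #[tokio::main]
    async fn main() -> Result<(), reqwest::Error> {
        let client = reqwest::Client::new();
        let base = "http://localhost:8080/turtle"; // assumed address and prefix

        // Queue a tree farm task at an arbitrary position (Vec3 encoding assumed).
        client.post(format!("{base}/createTreeFarm"))
            .json(&json!([120, 70, -45]))
            .send().await?;

        // Ask the scheduler to hand out any pending work.
        client.get(format!("{base}/pollScheduler")).send().await?;

        // Send turtle 3 to the nearest depot; the handler returns a usize
        // (per Depots::dock above, apparently the refuelled turtle's fuel level).
        let docked: usize = client.post(format!("{base}/3/dock"))
            .send().await?
            .json().await?;
        println!("dock returned {docked}");
        Ok(())
    }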