task work
There is currently a bug where multiple tasks get scheduled on the same turtle, to disastrous effect.
This commit is contained in:
parent cc29c64485
commit 4c9fab0ed7

8 changed files with 54 additions and 28 deletions
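
From the hunks below, the fix has three visible parts: Scheduler::add_turtle now rejects duplicate registrations, Scheduler::poll now removes completed tasks instead of leaving them in the queue, and the scheduler poll that ran inside the HTTP command handler is commented out. Short sketches follow the relevant hunks.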
@@ -47,7 +47,7 @@ impl Depots {
             if let TurtleCommandResponse::Failure = re.ret {
                 // partial refuel, good enough
                 warn!("only received {} fuel", turtle.fuel());
-                if turtle.fuel() > 5000 {
+                if turtle.fuel() > 1500 {
                     break;
                 } else {
                     turtle.execute(Wait(15)).await;
@@ -6,7 +6,7 @@ use time::OffsetDateTime;
 use tokio::task::JoinHandle;
 use typetag::serde;
 
-use crate::{blocks::{Vec3, Position, Direction}, turtle::{TurtleCommander, TurtleCommand, TurtleCommandResponse, InventorySlot}, tasks::{Task, TaskState}, depot::Depots, mine::fill};
+use crate::{blocks::{Vec3, Position, Direction}, turtle::{TurtleCommander, TurtleCommand, TurtleCommandResponse, InventorySlot}, tasks::{Task, TaskState}, depot::Depots, mine::fill, paths::TRANSPARENT};
 
 pub async fn fell_tree(turtle: TurtleCommander, bottom: Vec3) -> Option<()> {
     let mut log = bottom;
@@ -22,7 +22,7 @@ pub async fn fell_tree(turtle: TurtleCommander, bottom: Vec3) -> Option<()> {
 }
 
 /// Minutes before checking
-const SWEEP_DELAY: usize = 16;
+const SWEEP_DELAY: i64 = 16;
 
 #[derive(Serialize, Deserialize, Clone)]
 pub struct TreeFarm {
@@ -91,17 +91,19 @@ impl TreeFarm {
 
         // plant saplings
         for tree in 0..trees {
-            let sapling = match pop_item(&mut saplings) {
-                Some(slot) => slot,
-                None => break,
-            };
-
             let index = fill(self.size, tree);
             let offset = index.component_mul(&spacing);
             let tree = self.position + offset;
-            let near = turtle.goto_adjacent(tree).await?;
-            turtle.execute(TurtleCommand::Select(sapling)).await;
-            turtle.execute(near.place(tree)?).await;
+
+            if !turtle.world().occupied(tree).await {
+                let sapling = match pop_item(&mut saplings) {
+                    Some(slot) => slot,
+                    None => break,
+                };
+                let near = turtle.goto_adjacent(tree).await?;
+                turtle.execute(TurtleCommand::Select(sapling)).await;
+                turtle.execute(near.place(tree)?).await;
+            }
         }
 
         Some(())
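
As read from this hunk: the old loop popped a sapling and placed it unconditionally at every grid slot; the new version computes the slot position first and only consumes a sapling (and pathfinds to it) when world().occupied(tree) reports the spot empty, so standing trees no longer eat saplings.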
@@ -142,7 +144,7 @@ impl Task for TreeFarm {
 
     fn poll(&mut self) -> TaskState {
         let elapsed = OffsetDateTime::now_utc() - self.last_sweep;
-        if elapsed.whole_minutes() <= 16 {
+        if elapsed.whole_minutes() <= SWEEP_DELAY {
             return TaskState::Waiting;
         }
         self.last_sweep = OffsetDateTime::now_utc();
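
The two TreeFarm hunks above work together: the magic number 16 in poll becomes the SWEEP_DELAY constant, and the constant's type changes from usize to i64 because time::Duration::whole_minutes() returns i64. A minimal standalone check of that typing, assuming only the time crate:

    use time::OffsetDateTime;

    /// Minutes before checking (i64 to match Duration::whole_minutes)
    const SWEEP_DELAY: i64 = 16;

    fn main() {
        let earlier = OffsetDateTime::now_utc();
        let elapsed = OffsetDateTime::now_utc() - earlier; // time::Duration
        // whole_minutes() returns i64, so the comparison needs no cast
        assert!(elapsed.whole_minutes() <= SWEEP_DELAY);
    }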
@@ -71,6 +71,7 @@ async fn main() -> Result<(), Error> {
 
     info!("writing");
     write_to_disk(&*state.read().await).await?;
+    info!("written");
 
     server.closed().await;
@@ -17,6 +17,7 @@ pub async fn route_facing(from: Position, to: Vec3, world: &World) -> Option<Vec
 }
 
 pub async fn route(from: Position, to: Position, world: &World) -> Option<Vec<Position>> {
+    trace!("routing from {from:?} to {to:?}");
     // attempt at not crashing by looking infinitely into the abyss
     if world.get(to.pos).await
         .is_some_and(|b| difficulty(&b.name).is_none())
@@ -52,8 +52,8 @@ pub(crate) async fn serve(server: Router, listener: TcpListener) -> Sender<()> {
         loop {
             tokio::select! {
                 result = conn.as_mut() => {
-                    if result.is_err() {
-                        error!("req failed");
+                    if let Err(e) = result {
+                        error!("req failed: {e}");
                     }
                     break;
                 }
@@ -37,12 +37,15 @@ impl Default for Scheduler {
 
 impl Scheduler {
     /// Add a new turtle to the scheduler
-    /// Whether or not the turtle is already in the scheduler is not verified
     pub fn add_turtle(&mut self, turtle: &TurtleCommander) {
+        let name = turtle.name();
+        if self.turtles.iter().any(|(t,_)| t.name() == name) {
+            return;
+        }
         self.turtles.push((
             turtle.clone(),
             None
         ));
     }
 
     pub fn add_task(&mut self, task: Box<dyn Task>) {
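
This guard looks like the most direct answer to the commit message: a turtle registered twice (for example after reconnecting) appeared as two entries in self.turtles, and the assignment loop could hand each entry its own task. A minimal sketch of the idea, with illustrative stand-ins for the real Name and TurtleCommander types:

    #[derive(Clone, PartialEq, Debug)]
    struct Name(u32); // stand-in for the real turtle name type

    struct Scheduler {
        // (turtle, currently assigned task), mirroring the Vec in the diff
        turtles: Vec<(Name, Option<()>)>,
    }

    impl Scheduler {
        /// Add a new turtle to the scheduler, ignoring duplicates
        fn add_turtle(&mut self, name: Name) {
            if self.turtles.iter().any(|(t, _)| *t == name) {
                return; // already present: a second entry would double-schedule
            }
            self.turtles.push((name, None));
        }
    }

    fn main() {
        let mut s = Scheduler { turtles: Vec::new() };
        s.add_turtle(Name(7));
        s.add_turtle(Name(7)); // now a no-op
        assert_eq!(s.turtles.len(), 1);
    }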
@@ -67,7 +70,8 @@ impl Scheduler {
             turtle_positions.push(turtle.0.pos().await);
         }
 
-        for task in &mut self.tasks {
+        let mut done = vec![false; self.tasks.len()];
+        for (i, task) in self.tasks.iter_mut().enumerate() {
             let poll = task.poll();
             if let TaskState::Ready(position) = poll {
                 let closest_turtle = match free_turtles.iter_mut().zip(turtle_positions.iter()).min_by_key( |(_,p)| {
@@ -80,10 +84,17 @@ impl Scheduler {
                 closest_turtle.1 = Some(task.run(closest_turtle.0.clone()));
             }
+            if let TaskState::Complete = poll {
+                // TODO: removal
+                done[i] = true;
+            }
         }
 
+        // this feels like a hack
+        let mut i = 0;
+        self.tasks.retain(|_| {
+            let cont = !done[i];
+            i+=1;
+            cont
+        });
     }
 
     pub async fn cancel(&mut self, turtle: Name) -> Option<()> {
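
The sweep above marks finished tasks during iteration and drops them afterwards; the counter exists because Vec::retain hands the closure only the element, not its index. Since retain visits elements in their original order, the counter stays in sync. A standalone demonstration of the pattern:

    fn main() {
        // done[i] is set where poll() returned TaskState::Complete
        let done = [false, true, false];
        let mut tasks = vec!["quarry", "treefarm", "refuel"];

        let mut i = 0;
        tasks.retain(|_| {
            let keep = !done[i];
            i += 1;
            keep
        });

        assert_eq!(tasks, ["quarry", "refuel"]);
    }

Without the sweep, finished tasks would accumulate in the queue and keep being polled on every pass.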
@@ -211,8 +211,17 @@ impl TurtleCommander {
         self.world.clone()
     }
 
-    pub async fn dock(&self) -> Option<usize> {
-        Depots::dock(&self.depots, self.to_owned()).await
+    pub async fn dock(&self) -> usize {
+        loop {
+            let res = Depots::dock(&self.depots, self.to_owned()).await;
+            if let Some(fuel) = res {
+                return fuel;
+            }
+        }
+    }
+
+    pub async fn try_dock(&self) -> Option<usize> {
+        self.depots.dock(self.clone()).await
     }
 
     pub async fn goto(&self, pos: Position) -> Option<()> {
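
dock previously returned Option<usize> and callers unwrapped it; this hunk splits it into an infallible dock that retries until a depot attempt yields fuel, plus try_dock preserving the old single-attempt contract, which is why the HTTP dock handler below can drop its .unwrap(). A sketch of the shape, with try_dock_once as a hypothetical stand-in for the Depots::dock call, assuming the project's tokio runtime:

    // hypothetical stand-in for Depots::dock(&self.depots, ...)
    async fn try_dock_once() -> Option<usize> {
        Some(1_000) // pretend a depot granted 1000 fuel
    }

    /// Retry until some docking attempt succeeds
    async fn dock() -> usize {
        loop {
            if let Some(fuel) = try_dock_once().await {
                return fuel;
            }
        }
    }

    #[tokio::main]
    async fn main() {
        println!("docked with {} fuel", dock().await);
    }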
@@ -227,6 +236,8 @@ impl TurtleCommander {
         let routing = timeout(Duration::from_secs(2), route(recent, pos, &world));
         let route = routing.await.ok()??;
 
+        trace!("using route: {route:#?}");
+
         let steps: Vec<TurtleCommand> = route.iter().map_windows(|[from,to]| from.difference(**to).unwrap()).collect();
 
         'route: for (next_position, command) in route.into_iter().skip(1).zip(steps) {
@@ -93,7 +93,7 @@ pub(crate) async fn dock(
     let state = state.read().await;
     let commander = state.get_turtle(id).await.unwrap().clone();
     drop(state);
-    Json(commander.dock().await.unwrap())
+    Json(commander.dock().await)
 }
 
 pub(crate) async fn run_command(
@@ -163,7 +163,7 @@ pub(crate) async fn cancel(
     Path(id): Path<u32>,
     State(state): State<SharedControl>,
 ) -> &'static str {
-    //state.write().await.tasks
+    state.write().await.tasks.cancel(Name::from_num(id)).await;
 
     "ACK"
 }
@@ -211,11 +211,11 @@ pub(crate) async fn command(
         Some(command) => command,
         None => {
             tokio::spawn(async move {
-                let state = &state.clone();
-                if Instant::elapsed(&state.clone().read().await.started).as_secs_f64() > STARTUP_ALLOWANCE {
-                    let schedule = &mut state.write().await.tasks;
-                    schedule.poll().await;
-                }
+                //let state = &state.clone();
+                //if Instant::elapsed(&state.clone().read().await.started).as_secs_f64() > STARTUP_ALLOWANCE {
+                //    let schedule = &mut state.write().await.tasks;
+                //    schedule.poll().await;
+                //}
             });
             turtle::TurtleCommand::Wait(IDLE_TIME)
         },
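
Note on this last hunk: the idle branch of the command endpoint used to kick the scheduler on every polling request once STARTUP_ALLOWANCE had passed, so several turtles asking for work could each trigger an assignment pass. Commenting the body out (leaving a now-empty tokio::spawn) removes that trigger; presumably the poll now runs from a single place, though this diff only shows the removal.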