I tried the code below:
// NOTE: the map value must be `Inventory` itself, not `Vec<Inventory>` —
// `process_all` is a method on `Inventory`, so calling it on a `Vec<Inventory>`
// produces E0599. Keep ONE FIFO inventory per item and push each IN row into it.
let mut items = BTreeSet::new(); // Unique list of the items
let mut ins: BTreeMap<String, Inventory> = BTreeMap::new(); // one FIFO inventory per item
let mut outs: BTreeMap<String, Vec<OUTs>> = BTreeMap::new(); // saving outs for each item as vec
// Reading ins and build the unique items list
let file_path_ins = Path::new("src/ins.csv");
let mut rdr_ins = csv::ReaderBuilder::new()
    .has_headers(true)
    .delimiter(b',')
    .from_path(file_path_ins).unwrap();
for result in rdr_ins.records() {
    let record: INs = result.unwrap().deserialize(None).unwrap();
    items.insert(record.item.clone());
    // Append this batch to the item's single inventory queue (FIFO order).
    ins.entry(record.item.clone())
        .or_insert_with(Inventory::new)
        .push(record);
}
// Read outs
let file_path_outs = Path::new("src/outs.csv");
let mut rdr_outs = csv::ReaderBuilder::new()
    .has_headers(true)
    .from_path(file_path_outs).unwrap();
for result in rdr_outs.records() {
    let record: OUTs = result.unwrap().deserialize(None).unwrap();
    outs.entry(record.item.clone())
        .or_insert_with(Vec::new)
        .push(record);
}
// Now running the calculations related to each item.
// `get_mut` is required because `process_all` takes `&mut self`.
for item in items {
    let inventory = ins.get_mut(item.as_str()).unwrap();
    let item_outs = outs.get(item.as_str()).unwrap();
    let batches_out = inventory.process_all(item_outs);
}
The error I got is:
error[E0599]: no method named `process_all` found for reference `&std::vec::Vec<Inventory>` in the current scope
--> src\main.rs:106:31
|
106 | let batches_out = in1.process_all(&out1);
| ^^^^^^^^^^^ method not found in `&std::vec::Vec<Inventory>`
The full code as of now is:
use std::collections::{VecDeque, HashMap, BTreeSet, BTreeMap};
use std::path::Path;
use serde::{Serialize, Deserialize};
/// One incoming inventory row from `src/ins.csv`: `quantity` units of `item`
/// received on `date` as part of `batch`.
#[derive(Clone, Debug, Serialize, Deserialize)]
struct INs {
item: String,
/// Receipt date; kept as the raw CSV string — TODO confirm its format.
date: String,
/// Batch identifier used to label outgoing quantities in `BatchesOut`.
batch: String,
quantity: u64,
}
/// One outgoing order row from `src/outs.csv`: `quantity` units of `item`
/// shipped on `date`. Fulfilled against `Inventory` batches in FIFO order.
#[derive(Clone, Debug, Deserialize)]
struct OUTs {
item: String,
/// Shipment date; raw CSV string, copied onto every resulting `BatchesOut`.
date: String,
quantity: u64,
}
/// One output row for `src/batches_out.csv`: how many units of an OUT order
/// were taken from a specific IN batch (produced by `Inventory::process`).
#[derive(Clone, Debug, PartialEq, Serialize)]
struct BatchesOut {
item: String,
/// Batch the quantity was drawn from (copied from the IN record).
batch: String,
/// Date of the OUT order this allocation belongs to.
date: String,
quantity: u64,
}
/// FIFO inventory for a single item: IN batches are pushed at the back and
/// consumed from the front, so the oldest batch is always drawn down first.
#[derive(Debug, Clone, Default, Serialize)]
struct Inventory {
inventory: VecDeque<INs>,
}
impl Inventory {
    /// Creates an empty inventory queue.
    fn new() -> Self {
        Default::default()
    }

    /// Appends an incoming batch at the back of the FIFO queue.
    fn push(&mut self, item: INs) {
        self.inventory.push_back(item);
    }

    /// Fulfills as much of `order` as the oldest batch allows, returning the
    /// allocation as a `BatchesOut` row. Decrements both the batch and the
    /// order; pops the batch once it is fully consumed.
    ///
    /// # Panics
    /// Panics if the inventory is empty — callers must check first.
    fn process(&mut self, order: &mut OUTs) -> BatchesOut {
        let next_item = self
            .inventory
            .front_mut()
            .expect("process called on an empty inventory");
        // Take whichever is smaller: what the batch holds or what the order needs.
        let num_items = std::cmp::min(next_item.quantity, order.quantity);
        let batch = BatchesOut {
            item: next_item.item.clone(),
            batch: next_item.batch.clone(),
            date: order.date.clone(),
            quantity: num_items,
        };
        next_item.quantity -= num_items;
        if next_item.quantity == 0 {
            // Batch exhausted: advance to the next-oldest batch.
            self.inventory.pop_front();
        }
        order.quantity -= num_items;
        batch
    }

    /// Runs every OUT order against the inventory in FIFO order and returns
    /// one `BatchesOut` row per (order, batch) allocation.
    pub fn process_all(&mut self, outs: &[OUTs]) -> Vec<BatchesOut> {
        let mut batches = Vec::new();
        for out in outs {
            // Clone so the caller's order quantities are left untouched.
            let mut out = out.clone();
            // Guard on is_empty(): without it, an order larger than the total
            // inventory would make `process` panic on the empty queue.
            while out.quantity > 0 && !self.inventory.is_empty() {
                batches.push(self.process(&mut out));
            }
        }
        batches
    }
}
fn main() {
let mut ins: BTreeMap<String, Vec<Inventory>> = BTreeMap::new();
let mut items = BTreeSet::new();
let file_path_ins = Path::new("src/ins.csv");
let mut rdr_ins = csv::ReaderBuilder::new()
.has_headers(true)
.delimiter(b',')
.from_path(file_path_ins).unwrap();
for result in rdr_ins.records() {
let record: INs = result.unwrap().deserialize(None).unwrap();
let mut buf = VecDeque::new();
buf.push_back(INs { item: record.item.clone(), date: record.date,
batch: record.batch, quantity: record.quantity, });
ins.entry(record.item.clone())
.or_insert_with(Vec::new)
.push(Inventory{ inventory: buf});
items.insert(record.item);
};
let mut outs: BTreeMap<String, Vec<OUTs>> = BTreeMap::new();
let file_path_outs = Path::new("src/outs.csv");
let mut rdr_outs = csv::ReaderBuilder::new()
.has_headers(true)
.from_path(file_path_outs).unwrap();
for result in rdr_outs.records() {
let record: OUTs = result.unwrap().deserialize(None).unwrap();
outs.entry(record.item.clone())
.or_insert_with(Vec::new)
.push(OUTs { item: record.item, date: record.date, quantity: record.quantity});
};
for item in items {
let in1 =ins.get(item.as_str()).unwrap();
let out1 = outs.get(item.as_str()).unwrap();
let batches_out = in1.process_all(&out1);
let file_path_batches_out = Path::new("src/batches_out.csv");
let mut wtr = csv::Writer::from_path(file_path_batches_out).unwrap();
batches_out.iter().for_each(|batch|
wtr.serialize(BatchesOut {
item: batch.item.to_string(),
batch: batch.batch.to_string(),
date: batch.date.to_string(),
quantity: batch.quantity,
}).unwrap()
);
wtr.flush().unwrap();
}
}
My Cargo.toml dependencies are:
[dependencies]
csv = "1.1.3"
serde = { version = "1.0.106", features = ["derive"] }