Skip to content
Snippets Groups Projects
Commit c2020e55 authored by Xavier Routh's avatar Xavier Routh
Browse files

rustfmt

parent d1ad0099
No related branches found
No related tags found
1 merge request!28Draft: Interpreter, Tests, Debug Info, Misc.
......@@ -3,18 +3,17 @@
extern crate itertools;
extern crate ordered_float;
use self::itertools::Itertools;
use std::collections::VecDeque;
use std::collections::{HashMap, HashSet};
use std::convert::TryInto;
use std::panic;
use self::itertools::Itertools;
use std::collections::VecDeque;
use value::*;
extern crate hercules_ir;
extern crate hercules_opt;
use self::hercules_ir::*;
use self::hercules_opt::*;
......@@ -23,12 +22,13 @@ use self::hercules_opt::*;
* This module includes tools for interpreting. Execution model / flow is based on
* "A Simple Graph-Based Intermediate Representation" Section 3 by Cliff Click.
*
* Eventually we would an *interactive* interpreter. I.e to be able to step forward and backwards
* In the far future we would like an *interactive* interpreter, i.e. to be able to step forward and backwards
* throughout the IR (ideally visually) and analyze results at certain steps in the computation.
*
*/
/* Holds state for the interpreter. */
pub struct IRInterpreter<'a> {
constants: &'a Vec<Constant>,
types: &'a Vec<Type>,
......@@ -37,141 +37,170 @@ pub struct IRInterpreter<'a> {
}
pub struct FunctionExecutionState<'a> {
control_subgraph: &'a Subgraph,
constants: &'a Vec<Constant>,
types: &'a Vec<Type>,
args: Vec<InterpreterVal>, // parameters
function: &'a Function,
phi_values: Vec<InterpreterVal>, // Map phis -> values (UNUSED)
def_use: &'a ImmutableDefUseMap,
dynamic_constants: &'a Vec<DynamicConstant>,
fork_join_map: &'a HashMap<NodeID, NodeID>, // Map forks -> joins
fork_join_nest: &'a HashMap<NodeID, Vec<NodeID>>,
// Map a vec of (thread indicies, reduce_node) -> Value
// Reduction values are shared across the fork / join pair that drives them, so they can't be local to a control token.
reduce_values: HashMap<(Vec<usize>, NodeID), InterpreterVal>, // Map (thread indicies, reduciton node) -> value
join_statuses: HashMap<(Vec<usize>, NodeID), usize>, // Counters until the joins are done!
reduce_values: HashMap<(Vec<usize>, NodeID), InterpreterVal>, // Map (thread indicies, reduction node) -> value
join_counters: HashMap<(Vec<usize>, NodeID), usize>, // Map (thread indicies, join_node) -> join_counter. Counters until the joins are done!
// Per module State.
module: &'a Module,
function_contexts: Vec<&'a FunctionContext>,
//
function_id: FunctionID,
}
/*
pub struct FunctionContext {
pub(crate) function: &'a Function,
pub(crate) types: &'a Vec<Type>,
pub(crate) constants: &'a Vec<Constant>,
pub(crate) dynamic_constants: &'a Vec<DynamicConstant>,
pub(crate) def_use: &'a ImmutableDefUseMap,
pub(crate) reverse_postorder: &'a Vec<NodeID>,
pub(crate) typing: &'a Vec<TypeID>,
pub(crate) control_subgraph: &'a Subgraph,
pub(crate) fork_join_map: &'a HashMap<NodeID, NodeID>,
pub(crate) fork_join_nest: &'a HashMap<NodeID, Vec<NodeID>>,
pub(crate) antideps: &'a Vec<(NodeID, NodeID)>,
pub(crate) bbs: &'a Vec<NodeID>,
} */
// Each control token stores a current position, and also a mapping of fork nodes -> thread idx.
control_subgraph: &'a Subgraph,
def_use: &'a ImmutableDefUseMap,
fork_join_map: &'a HashMap<NodeID, NodeID>, // Map forks -> joins
fork_join_nest: &'a HashMap<NodeID, Vec<NodeID>>,
}
impl FunctionContext {
pub fn new(
control_subgraph: &'a Subgraph,
def_use: &'a ImmutableDefUseMap,
fork_join_map: &'a HashMap<NodeID, NodeID>, // Map forks -> joins
fork_join_nest: &'a HashMap<NodeID, Vec<NodeID>>,
) -> FunctionContext {
FunctionContext {
control_subgraph,
def_use,
fork_join_map,
fork_join_nest,
}
}
}
// Each control token stores a current position, and also a mapping of fork nodes -> thread idx.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct ControlToken {
pub curr: NodeID,
pub prev: NodeID,
//pub thread_indicies: HashMap<NodeID, usize>, // Map forks -> thread indicies. (could be sparse slotmap)
pub thread_indicies: Vec<usize>, // Stack of thread_indicies.
// Both phi_values and reduce_values can be shared or duplicated depending on
// nesting , we rely on well-formed hercules IR inputs for no problems to occur.
// Portions of these maps will be shared according to the, instead of indexing them with
// a thread id and selectively including or unincluding the threadid during indexxing,
// (this is probably so much smarter), we store a separate copy of each map with the control tokens.
// On the other hand, this lets us eventually multihread easier?
pub prev: NodeID, // Used for properly latching PHI nodes, as they dependen on their control predecessor.
pub thread_indicies: Vec<usize>, // Stack of thread_indicies, can use fork_join nests to recover thread indicies per fork.
// Map phis -> values
// Oddly, these are stored per control token, when some other per-token data is stored in the FunctionExecutionState
// we could move this `phi_values` to be consistent, however that would make CoW behvaior optimization more difficult.
pub phi_values: HashMap<NodeID, InterpreterVal>, // TODO: Optimize this to a Cow<HashMap>, or optimize this whole thing.
}
impl<'a> FunctionExecutionState<'a> {
pub fn new(args: Vec<InterpreterVal>, control_subgraph: &'a Subgraph, module: &'a Module, def_use: &'a ImmutableDefUseMap, dynamic_constants: &'a Vec<DynamicConstant>, fork_join_map: &'a HashMap<NodeID, NodeID>, fork_join_nest: &'a HashMap<NodeID, Vec<NodeID>>, function: &'a Function) -> Self {
pub fn new(
args: Vec<InterpreterVal>,
module: &'a Module,
function_id: &'a FunctionID,
function_contexts: Vec<&'a FunctionContext>,
) -> Self {
debug_assert_eq!(args.len(), function.param_types.len());
FunctionExecutionState {
control_subgraph,
constants: &module.constants,
types: &module.types,
function,
phi_values: vec![InterpreterVal::Integer8(0).clone(); function.nodes.len()], // Sprase slotmap
args,
def_use,
dynamic_constants,
fork_join_map,
fork_join_nest,
reduce_values: HashMap::new(),
join_statuses: HashMap::new(),
join_counters: HashMap::new(),
module,
function_contexts,
function_id,
}
}
pub fn handle_region(&mut self, token: &mut ControlToken, prev: NodeID, node: NodeID, preds: &Box<[NodeID]>) -> NodeID {
pub fn get_control_subgraph(&self) -> &Subgraph {
self.function_contexts[self.function_id.idx()].control_subgraph
}
/// Process a region node: latch every PHI that uses this region for the
/// incoming control edge, then return the region's single control successor.
pub fn handle_region(
    &mut self,
    token: &mut ControlToken,
    prev: NodeID,
    node: NodeID,
    preds: &Box<[NodeID]>,
) -> NodeID {
    // Gather PHI nodes for this region node.
    let phis: Vec<NodeID> = self
        .def_use
        .get_users(node)
        .into_iter()
        .filter_map(|n| {
            if self.function.nodes[n.idx()].is_phi() {
                Some(*n)
            } else {
                None
            }
        })
        .collect();

    // Find which incoming edge of this region was taken.
    let (edge_num, _) = preds
        .into_iter()
        .find_position(|pred| **pred == prev)
        .expect("PANIC: invalid incoming edge.");

    // Evaluate all PHI values first, then write them, so one PHI's new value
    // cannot be observed while latching a sibling PHI.
    let mut to_latch = vec![];
    for phi in phis {
        to_latch.push(self.handle_phi(token, edge_num, phi));
    }

    // Write values into the token's phi map.
    for (phi, value) in to_latch {
        token.phi_values.insert(phi, value);
    }

    // Return the (single) control successor of the region node.
    self.get_control_subgraph().succs(node).next().unwrap()
}
pub fn handle_phi(&mut self, token: &ControlToken, edge: usize, phi: NodeID) -> (NodeID, InterpreterVal) {
let (control, data) = &self.function.nodes[phi.idx()].try_phi().expect("PANIC: handle_phi on non-phi node.");
pub fn handle_phi(
&mut self,
token: &ControlToken,
edge: usize,
phi: NodeID,
) -> (NodeID, InterpreterVal) {
let (control, data) = &self.function.nodes[phi.idx()]
.try_phi()
.expect("PANIC: handle_phi on non-phi node.");
let value_node = data[edge];
println!("Latching PHI value of node {:?}", value_node.idx());
let value = self.handle_data(token, value_node);
(phi, value)
}
// Drive all reductions for this join.
// Drive all reductions for this join.
// TODO: Change all these to return vecs of ControlTokens, so the function interfaces are consistent and make sense .
pub fn handle_join(&mut self, mut token: ControlToken, join: NodeID) -> Option<ControlToken> {
let reduces: Vec<NodeID> = self.def_use.get_users(join).into_iter().filter_map(
|n| {
if self.function.nodes[n.idx()].is_reduce() {
Some(*n)
}
else {
None
}
}).collect();
let reduces: Vec<NodeID> = self
.def_use
.get_users(join)
.into_iter()
.filter_map(|n| {
if self.function.nodes[n.idx()].is_reduce() {
Some(*n)
} else {
None
}
})
.collect();
for reduction in reduces {
self.handle_reduction(&token, reduction);
}
let thread_values = self.get_thread_factors(&token, join);
// dbg!(thread_values.clone());
// This and_modify doesn't do anything?? (verify: it should decrement the join counter in place)
self.join_statuses.entry((thread_values.clone(), join)).and_modify(|v| {*v -= 1});
if *self.join_statuses.get(&(thread_values, join)).expect("PANIC: join counter not initialized") == 0 {
self.join_counters
.entry((thread_values.clone(), join))
.and_modify(|v| *v -= 1);
if *self
.join_counters
.get(&(thread_values, join))
.expect("PANIC: join counter not initialized")
== 0
{
let curr = token.curr;
token.prev = curr;
token.thread_indicies.pop(); // Get rid of this thread index.
......@@ -184,8 +213,11 @@ impl<'a> FunctionExecutionState<'a> {
// Get top N-1 thread factors, where N is the depth of the control token.
pub fn get_thread_factors(&self, token: &ControlToken, control_node: NodeID) -> Vec<usize> {
let nested_forks = self.fork_join_nest.get(&control_node).expect("PANIC: invalid node for fork join nest.").clone();
let nested_forks = self
.fork_join_nest
.get(&control_node)
.expect("PANIC: invalid node for fork join nest.")
.clone();
// Take the top N entries such that it matches the length of the TRF in the control token.
// Get the depth of the control token that is requesting this reduction node.
......@@ -195,63 +227,85 @@ impl<'a> FunctionExecutionState<'a> {
let len = fork_levels - 1;
//println!("len: {:?}", len);
let mut thread_values = token.thread_indicies.clone();
thread_values.truncate(len);
//println!("final values: {:?}", thread_values.clone());
thread_values
}
// Drive the reduction, this will be called for each control token.
pub fn handle_reduction(&mut self, token: &ControlToken, reduce: NodeID) {
// control (join), initializer, reduction application?
let (control, init, reduct) = &self.function.nodes[reduce.idx()].try_reduce().expect("PANIC: handle_reduction on non-reduce node.");
// Drive the reduction, this will be invoked for each control token.
pub fn handle_reduction(&mut self, token: &ControlToken, reduce: NodeID) {
let (control, init, reduct) = &self.function.nodes[reduce.idx()]
.try_reduce()
.expect("PANIC: handle_reduction on non-reduce node.");
let thread_values = self.get_thread_factors(token, *control);
// If empty set to default (figure out how to not repeat this check)
// TODO (Can we do it upon entry to the fork node?)
// It is UB? to have the initializer depend on things within the fork-join section, do we check for that?
// If empty set to default (figure out how to not repeat this check)
// TODO: (Can we do it upon entry to the fork node?)
let init = self.handle_data(&token, *init);
self.reduce_values.entry((thread_values.clone(), reduce)).or_insert(init);
// Q: It is UB to have the initializer depend on things within the fork-join section? do we check for that?
// A: In general it is not our responsibility to check for poorly defined IR, that can be done by some other pass.
self.reduce_values
.entry((thread_values.clone(), reduce))
.or_insert(init);
let data = self.handle_data(&token, *reduct);
println!("reduction write: {:?}, {:?}, {:?}", thread_values, reduce, data);
println!(
"reduction write: {:?}, {:?}, {:?}",
thread_values, reduce, data
);
self.reduce_values.insert((thread_values, reduce), data);
}
pub fn handle_data(&mut self, token: &ControlToken, node: NodeID) -> InterpreterVal {
println!("Data Node: {} {:?}", node.idx(), &self.function.nodes[node.idx()]);
// println!("Data Node: {} {:?}", node.idx(), &self.function.nodes[node.idx()]);
match &self.function.nodes[node.idx()] {
Node::Phi { control, data } => {
(*token.phi_values.get(&node).expect("PANIC: Phi value not latched.")).clone()
}
Node::Phi { control, data } => (*token
.phi_values
.get(&node)
.expect("PANIC: Phi value not latched."))
.clone(),
Node::ThreadID { control } => {
// `control` is the fork that drives this node.
let nesting_level = self.fork_join_nest.get(control).expect("PANIC: No nesting information for thread index!").len();
let nesting_level = self
.fork_join_nest
.get(control)
.expect("PANIC: No nesting information for thread index!")
.len();
let v = token.thread_indicies[nesting_level - 1]; // Might have to -1?
InterpreterVal::DynamicConstant((v).into())
}
// If we read from a reduction that is the same depth as this thread, we need to write back to it before anyone else reads from it.
Node::Reduce { control, init, reduct } => {
// If we read from a reduction that is the same depth as this thread, we need to write back to it before anyone else reads from it.
// This probably isn't the exact condition, but something similar. Anyway, we achieve correctness by iterating control nodes recursively.
Node::Reduce {
control,
init,
reduct,
} => {
let thread_values = self.get_thread_factors(token, *control);
let init = self.handle_data(&token, *init);
println!("reduction read: {:?}, {:?}", thread_values, node);
let val = self.reduce_values.entry((thread_values, node)).or_insert(init).clone();
let val = self
.reduce_values
.entry((thread_values, node))
.or_insert(init)
.clone();
println!("value: {:?}", val.clone());
val
}
Node::Parameter { index } => {
self.args[*index].clone()
}
Node::Parameter { index } => self.args[*index].clone(),
Node::Constant { id } => {
let con = &self.constants[id.idx()];
InterpreterVal::from_constant(con, &self.constants, &self.types, &self.dynamic_constants)
InterpreterVal::from_constant(
con,
&self.constants,
&self.types,
&self.dynamic_constants,
)
}
Node::DynamicConstant { id } => {
let dyn_con = &self.dynamic_constants[id.idx()];
......@@ -260,23 +314,28 @@ impl<'a> FunctionExecutionState<'a> {
DynamicConstant::Parameter(v) => todo!(),
};
// TODO: Figure out what type / semantics are of thread ID and dynamic const.
InterpreterVal::DynamicConstant((*v).into())
InterpreterVal::DynamicConstant((*v).into())
}
Node::Unary { input, op } => {
let val = self.handle_data(token, *input);
InterpreterVal::unary_op(*op, val)
},
}
Node::Binary { left, right, op } => {
let left = self.handle_data(token, *left);
let right = self.handle_data(token, *right);
InterpreterVal::binary_op(*op, left, right)
}
Node::Ternary { first, second, third, op } => {
Node::Ternary {
first,
second,
third,
op,
} => {
let first = self.handle_data(token, *first);
let second = self.handle_data(token, *second);
let third = self.handle_data(token, *third);
// let third =
// let third =
match op {
TernaryOperator::Select => {
if (first == InterpreterVal::Boolean(true)) {
......@@ -287,7 +346,11 @@ impl<'a> FunctionExecutionState<'a> {
}
}
}
Node::Call { function, dynamic_constants, args } => todo!(),
Node::Call {
function,
dynamic_constants,
args,
} => todo!(),
Node::Read { collect, indices } => {
// TODO: Panic on control-indicies.
let collection = self.handle_data(token, *collect);
......@@ -295,35 +358,49 @@ impl<'a> FunctionExecutionState<'a> {
println!("collection: {:?}", collection);
self.handle_read(token, collection, indices)
}
Node::Write { collect, data, indices } => {
Node::Write {
collect,
data,
indices,
} => {
let collection = self.handle_data(token, *collect);
let data = self.handle_data(token, *data);
self.handle_write(token, collection, data, indices)
}
_ => todo!()
_ => todo!(),
}
}
pub fn handle_write(&mut self, token: &ControlToken, collection: InterpreterVal, data: InterpreterVal, indices: &[Index]) -> InterpreterVal {
let index = &indices[0];
pub fn handle_write(
&mut self,
token: &ControlToken,
collection: InterpreterVal,
data: InterpreterVal,
indices: &[Index],
) -> InterpreterVal {
let index = &indices[0];
//println!("first index: {:?}", index);
// TODO: Recurse on writes correctly
let val = match index {
Index::Field(_) => todo!(),
Index::Variant(_) => todo!(),
Index::Position(array_indices) => { // Arrays also have inner indices...
Index::Position(array_indices) => {
// Arrays also have inner indices...
// Recover dimensional data from types.
let array_indices: Vec<_> = array_indices.into_iter().map(|idx| self.handle_data(token, *idx).as_usize()).collect();
let array_indices: Vec<_> = array_indices
.into_iter()
.map(|idx| self.handle_data(token, *idx).as_usize())
.collect();
// TODO: Implemenet . try_array() and other try_conversions on the InterpreterVal type
if let InterpreterVal::Array(type_id, mut vals) = collection {
// TODO: Make this its own funciton to reuse w/ array_size
let extents: Vec<_> = self.types[type_id.idx()].try_extents().expect("PANIC: wrong type for array").into_iter().map(|extent|
self.dynamic_constants[extent.idx()].value()
).collect();
let extents: Vec<_> = self.types[type_id.idx()]
.try_extents()
.expect("PANIC: wrong type for array")
.into_iter()
.map(|extent| self.dynamic_constants[extent.idx()].value())
.collect();
let idx = InterpreterVal::array_idx(&extents, &array_indices);
//println!("idx: {:?}", idx);
vals[idx] = data;
......@@ -331,42 +408,54 @@ impl<'a> FunctionExecutionState<'a> {
} else {
panic!("PANIC: Position index on not an array")
}
},
Index::Control(_) => panic!("PANIC: Unexpected control read.")
}
Index::Control(_) => panic!("PANIC: Unexpected control read."),
};
val
}
pub fn handle_read(&mut self, token: &ControlToken, collection: InterpreterVal, indices: &[Index]) -> InterpreterVal {
let index = &indices[0];
pub fn handle_read(
&mut self,
token: &ControlToken,
collection: InterpreterVal,
indices: &[Index],
) -> InterpreterVal {
let index = &indices[0];
//println!("first index: {:?}", index);
let val = match index {
Index::Field(_) => todo!(),
Index::Variant(_) => todo!(),
Index::Position(array_indices) => { // Arrays also have inner indices...
Index::Position(array_indices) => {
// Arrays also have inner indices...
// Recover dimensional data from types.
let array_indices: Vec<_> = array_indices.into_iter().map(|idx| self.handle_data(token, *idx).as_usize()).collect();
let array_indices: Vec<_> = array_indices
.into_iter()
.map(|idx| self.handle_data(token, *idx).as_usize())
.collect();
// TODO: Implemenet . try_array() and other try_conversions on the InterpreterVal type
if let InterpreterVal::Array(type_id, vals) = collection {
// TODO: Make this its own funciton to reuse w/ array_size
let extents: Vec<_> = self.types[type_id.idx()].try_extents().expect("PANIC: wrong type for array").into_iter().map(|extent|
self.dynamic_constants[extent.idx()].value()
).collect();
let extents: Vec<_> = self.types[type_id.idx()]
.try_extents()
.expect("PANIC: wrong type for array")
.into_iter()
.map(|extent| self.dynamic_constants[extent.idx()].value())
.collect();
vals[InterpreterVal::array_idx(&extents, &array_indices)].clone()
} else {
panic!("PANIC: Position index on not an array")
}
},
Index::Control(_) => panic!("PANIC: Unexpected control read.")
}
Index::Control(_) => panic!("PANIC: Unexpected control read."),
};
// Recurse
if indices.len() == 1 {
val
val
} else {
self.handle_read(token, val, &indices[1..])
}
......@@ -382,20 +471,30 @@ impl<'a> FunctionExecutionState<'a> {
.0,
);
let start_token = ControlToken { curr: start_node, prev: start_node, thread_indicies: Vec::new(), phi_values: HashMap::new() };
let start_token = ControlToken {
curr: start_node,
prev: start_node,
thread_indicies: Vec::new(),
phi_values: HashMap::new(),
};
let mut live_tokens: Vec<ControlToken> = Vec::new();
let mut live_tokens: Vec<ControlToken> = Vec::new();
live_tokens.push(start_token);
let mut next_live_tokens: Vec<ControlToken> = Vec::new();
// Each control token stores a (current, previous) position, and also a mapping of fork nodes -> thread idx.
let mut next_live_tokens: Vec<ControlToken> = Vec::new();
// Each control token stores a (current, previous) position, and also a mapping of fork nodes -> thread idx.
// This is a disaster.
// To do reduction nodes correctly we have to traverse control nodes pre-order.
// To do reduction nodes correctly we have to traverse control tokens in a depth-first fashion (i.e immediately handle spawned threads).
'outer: loop {
let mut ctrl_token = live_tokens.pop().expect("a");
println!("\n\nNew Token at: Control State: {} threads: {:?}, {:?}", ctrl_token.curr.idx(), ctrl_token.thread_indicies.clone(), &self.function.nodes[ctrl_token.curr.idx()]);
println!(
"\n\nNew Token at: Control State: {} threads: {:?}, {:?}",
ctrl_token.curr.idx(),
ctrl_token.thread_indicies.clone(),
&self.function.nodes[ctrl_token.curr.idx()]
);
let mut new_tokens = match &self.function.nodes[ctrl_token.curr.idx()] {
Node::Start => {
......@@ -425,15 +524,18 @@ impl<'a> FunctionExecutionState<'a> {
// Convert condition to usize
let cond: usize = match cond {
InterpreterVal::Boolean(v) => v.into(),
_ => panic!("PANIC: Invalid condition for IF, please typecheck.")
_ => panic!("PANIC: Invalid condition for IF, please typecheck."),
};
// Find control read succesor that matches the condition.
let next = self.control_subgraph.succs(ctrl_token.curr)
.find(|n|
{self.function.nodes[n.idx()]
.try_control_read(cond)
.is_some()})
let next = self
.control_subgraph
.succs(ctrl_token.curr)
.find(|n| {
self.function.nodes[n.idx()]
.try_control_read(cond)
.is_some()
})
.expect("PANIC: No outgoing valid outgoing edge.");
println!("{:?}", next);
......@@ -442,22 +544,23 @@ impl<'a> FunctionExecutionState<'a> {
ctrl_token.prev = curr;
vec![ctrl_token]
}
Node::Match { control, sum } => todo!(),
Node::Fork { control, factor } => {
let fork = ctrl_token.curr;
// Find all joins and initialize them, and set their reduction counters as well
// Find all joins and initialize them, and set their reduction counters as well.
let dyn_con = &self.dynamic_constants[factor.idx()];
let factor = match dyn_con {
DynamicConstant::Constant(v) => v,
DynamicConstant::Parameter(v) => todo!(),
};
let next = self.control_subgraph.succs(ctrl_token.curr).nth(0).unwrap();
// TODO: Clean this up:
let next = self.get_control_subgraph().succs(ctrl_token.curr).nth(0).unwrap();
let curr = ctrl_token.curr;
ctrl_token.curr = next;
ctrl_token.prev = curr;
let mut tokens_to_add = Vec::with_capacity(*factor);
// TODO: Better way to write this loop?
......@@ -468,17 +571,20 @@ impl<'a> FunctionExecutionState<'a> {
}
let thread_factors = self.get_thread_factors(&ctrl_token, ctrl_token.curr);
let join = self.fork_join_map.get(&curr).expect("PANIC: fork missing a join.");
let join = self
.fork_join_map
.get(&curr)
.expect("PANIC: fork missing a join.");
println!("thread factors for join; {:?} {:?}", thread_factors.clone(), *join);
self.join_statuses.insert((thread_factors, *join), *factor);
//println!("thread factors for join; {:?} {:?}", thread_factors.clone(), *join);
self.join_counters.insert((thread_factors, *join), *factor);
tokens_to_add
}
Node::Join { control } => {
// Only make a control token if the join is finished.
// Only make a control token if the join is finished.
let join = ctrl_token.curr;
if let Some(next) = self.handle_join(ctrl_token, join) {
vec![next]
} else {
......@@ -489,10 +595,16 @@ impl<'a> FunctionExecutionState<'a> {
let result = self.handle_data(&ctrl_token, *data);
println!("result = {:?}", result);
break 'outer;
}
Node::Call { function, dynamic_constants, args } => todo!(),
}
Node::Call {
function,
dynamic_constants,
args,
} => {
todo!() //let function =
}
_ => panic!("PANIC: Unexpected node in control subgraph"),
};
};
for i in new_tokens {
live_tokens.push(i);
......@@ -502,8 +614,6 @@ impl<'a> FunctionExecutionState<'a> {
//next_live_tokens.clear()
}
}
}
impl<'a> IRInterpreter<'a> {
......
extern crate clap;
extern crate rand;
extern crate hercules_opt;
extern crate hercules_ir;
extern crate hercules_opt;
extern crate rand;
pub mod interpreter;
pub mod value;
use interpreter::*;
use value::*;
use std::fs::File;
use std::io::prelude::*;
use value::*;
use self::hercules_ir::*;
use self::hercules_opt::*;
......@@ -51,7 +51,6 @@ fn main() {
pm.add_pass(hercules_opt::pass::Pass::DCE);
pm.add_pass(hercules_opt::pass::Pass::Xdot(true));
pm.run_passes();
pm.make_reverse_postorders();
pm.make_doms();
......@@ -68,16 +67,36 @@ fn main() {
let control_subgraphs = pm.control_subgraphs.as_ref().unwrap().clone();
let def_uses = pm.def_uses.as_ref().unwrap().clone();
let module = pm.get_module();
let mut function_contexts = vec![];
// TODO: We should consider moving to slotmaps.
for (idx, function) in module.functions.into_iter().enumerate() {
let context = FunctionContext::new(
&control_subgraphs[idx],
&def_uses[idx],
&fork_join_maps[idx],
&fork_join_nests[idx],
);
function_contexts.push(context);
}
let dynamic_constants = vec![DynamicConstant::Constant(2); 3];
//let vec = vec![InterpreterVal::Integer32(15); 12];
let matrix1 = vec![InterpreterVal::Integer32(2), InterpreterVal::Integer32(3),
InterpreterVal::Integer32(4), InterpreterVal::Integer32(1)];
let matrix2 = vec![InterpreterVal::Integer32(2), InterpreterVal::Integer32(3),
InterpreterVal::Integer32(4), InterpreterVal::Integer32(1)];
let matrix1 = vec![
InterpreterVal::Integer32(2),
InterpreterVal::Integer32(3),
InterpreterVal::Integer32(4),
InterpreterVal::Integer32(1),
];
let matrix2 = vec![
InterpreterVal::Integer32(2),
InterpreterVal::Integer32(3),
InterpreterVal::Integer32(4),
InterpreterVal::Integer32(1),
];
//let array = InterpreterVal::Array(TypeID::new(0), bingle.into_boxed_slice());
//let args = vec![];
......@@ -89,19 +108,17 @@ fn main() {
let array_type = parameter_types[0];
println!("type: {:?}", array_type);
let args = vec![InterpreterVal::Array(parameter_types[0], matrix1.into_boxed_slice()), InterpreterVal::Array(parameter_types[1], matrix2.into_boxed_slice())];
let args = vec![
InterpreterVal::Array(parameter_types[0], matrix1.into_boxed_slice()),
InterpreterVal::Array(parameter_types[1], matrix2.into_boxed_slice()),
];
let mut state = interpreter::FunctionExecutionState::new(
args,
&control_subgraphs[function_number],
&module,
&def_uses[function_number],
&dynamic_constants,
&fork_join_maps[function_number],
&fork_join_nests[function_number],
&module.functions[function_number]
function_contexts,
function_number,
);
state.run();
}
This diff is collapsed.
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment