Fix #26 - use lambdas/a struct instead of macros

commit 430c3dadcc
parent d290adfefa
Author: Philpax
Date:   2022-05-24 14:04:05 +02:00

@@ -142,45 +142,41 @@ impl NodeTemplateTrait for MyNodeTemplate {
         // The nodes are created empty by default. This function needs to take
         // care of creating the desired inputs and outputs based on the template
-        // We define some macros here to avoid boilerplate. Note that this is
+        // We define some lambdas here to avoid boilerplate. Note that this is
         // entirely optional.
-        macro_rules! input {
-            (scalar $name:expr) => {
-                graph.add_input_param(
-                    node_id,
-                    $name.to_string(),
-                    MyDataType::Scalar,
-                    MyValueType::Scalar { value: 0.0 },
-                    InputParamKind::ConnectionOrConstant,
-                    true,
-                );
-            };
-            (vector $name:expr) => {
-                graph.add_input_param(
-                    node_id,
-                    $name.to_string(),
-                    MyDataType::Vec2,
-                    MyValueType::Vec2 {
-                        value: egui::vec2(0.0, 0.0),
-                    },
-                    InputParamKind::ConnectionOrConstant,
-                    true,
-                );
-            };
-        }
+        let input_scalar = |graph: &mut MyGraph, name: &str| {
+            graph.add_input_param(
+                node_id,
+                name.to_string(),
+                MyDataType::Scalar,
+                MyValueType::Scalar { value: 0.0 },
+                InputParamKind::ConnectionOrConstant,
+                true,
+            );
+        };
+        let input_vector = |graph: &mut MyGraph, name: &str| {
+            graph.add_input_param(
+                node_id,
+                name.to_string(),
+                MyDataType::Vec2,
+                MyValueType::Vec2 {
+                    value: egui::vec2(0.0, 0.0),
+                },
+                InputParamKind::ConnectionOrConstant,
+                true,
+            );
+        };

-        macro_rules! output {
-            (scalar $name:expr) => {
-                graph.add_output_param(node_id, $name.to_string(), MyDataType::Scalar);
-            };
-            (vector $name:expr) => {
-                graph.add_output_param(node_id, $name.to_string(), MyDataType::Vec2);
-            };
-        }
+        let output_scalar = |graph: &mut MyGraph, name: &str| {
+            graph.add_output_param(node_id, name.to_string(), MyDataType::Scalar);
+        };
+        let output_vector = |graph: &mut MyGraph, name: &str| {
+            graph.add_output_param(node_id, name.to_string(), MyDataType::Vec2);
+        };

         match self {
             MyNodeTemplate::AddScalar => {
-                // The first input param doesn't use the macro so we can comment
+                // The first input param doesn't use the lambda so we can comment
                 // it in more detail.
                 graph.add_input_param(
                     node_id,
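
Aside (not part of the commit): the new helper closures take graph: &mut MyGraph as an explicit parameter instead of capturing the graph from the enclosing scope. A plausible reason, sketched below with an invented DemoGraph type, is that two closures which each captured the graph mutably could not coexist, whereas parameter-passing borrows the graph only for the duration of each call:

    struct DemoGraph {
        params: Vec<String>,
    }

    fn main() {
        let mut graph = DemoGraph { params: Vec::new() };

        // Passing `&mut DemoGraph` per call: both helpers can exist at once,
        // and each call borrows the graph only while it runs.
        let input_scalar = |g: &mut DemoGraph, name: &str| g.params.push(format!("scalar:{name}"));
        let input_vector = |g: &mut DemoGraph, name: &str| g.params.push(format!("vector:{name}"));

        input_scalar(&mut graph, "A");
        input_vector(&mut graph, "v1");

        // Capturing the graph mutably instead would not compile once two such
        // closures are alive at the same time (E0499: cannot borrow `graph`
        // as mutable more than once):
        //
        //     let input_scalar = |name: &str| graph.params.push(format!("scalar:{name}"));
        //     let input_vector = |name: &str| graph.params.push(format!("vector:{name}"));

        assert_eq!(graph.params.len(), 2);
    }

The same constraint shows up in build_node itself, where the AddScalar arm also calls graph.add_input_param directly in between the helper calls.
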
@@ -197,37 +193,37 @@ impl NodeTemplateTrait for MyNodeTemplate {
                     InputParamKind::ConnectionOrConstant,
                     true,
                 );
-                input!(scalar "B");
-                output!(scalar "out");
+                input_scalar(graph, "B");
+                output_scalar(graph, "out");
             }
             MyNodeTemplate::SubtractScalar => {
-                input!(scalar "A");
-                input!(scalar "B");
-                output!(scalar "out");
+                input_scalar(graph, "A");
+                input_scalar(graph, "B");
+                output_scalar(graph, "out");
             }
             MyNodeTemplate::VectorTimesScalar => {
-                input!(scalar "scalar");
-                input!(vector "vector");
-                output!(vector "out");
+                input_scalar(graph, "scalar");
+                input_vector(graph, "vector");
+                output_vector(graph, "out");
             }
             MyNodeTemplate::AddVector => {
-                input!(vector "v1");
-                input!(vector "v2");
-                output!(vector "out");
+                input_vector(graph, "v1");
+                input_vector(graph, "v2");
+                output_vector(graph, "out");
             }
             MyNodeTemplate::SubtractVector => {
-                input!(vector "v1");
-                input!(vector "v2");
-                output!(vector "out");
+                input_vector(graph, "v1");
+                input_vector(graph, "v2");
+                output_vector(graph, "out");
             }
             MyNodeTemplate::MakeVector => {
-                input!(scalar "x");
-                input!(scalar "y");
-                output!(vector "out");
+                input_scalar(graph, "x");
+                input_scalar(graph, "y");
+                output_vector(graph, "out");
             }
             MyNodeTemplate::MakeScalar => {
-                input!(scalar "value");
-                output!(scalar "out");
+                input_scalar(graph, "value");
+                output_scalar(graph, "out");
             }
         }
     }
@@ -400,30 +396,38 @@ pub fn evaluate_node(
     node_id: NodeId,
     outputs_cache: &mut OutputsCache,
 ) -> anyhow::Result<MyValueType> {
-    // Similar to when creating node types above, we define two macros for
-    // convenience. They may be overkill for this small example, but something
-    // like this makes the code much more readable when the number of nodes
-    // starts growing.
-    macro_rules! input {
-        (Vec2 $name:expr) => {
-            evaluate_input(graph, node_id, $name, outputs_cache)?.try_to_vec2()?
-        };
-        (Scalar $name:expr) => {
-            evaluate_input(graph, node_id, $name, outputs_cache)?.try_to_scalar()?
-        };
-    }
-    macro_rules! output {
-        (Vec2 $name:expr => $value:expr) => {{
-            let out = MyValueType::Vec2 { value: $value };
-            populate_output(graph, outputs_cache, node_id, $name, out)?;
-            Ok(out)
-        }};
-        (Scalar $name:expr => $value:expr) => {{
-            let out = MyValueType::Scalar { value: $value };
-            populate_output(graph, outputs_cache, node_id, $name, out)?;
-            Ok(out)
-        }};
-    }
+    // To solve a similar problem as creating node types above, we define an
+    // Evaluator as a convenience. They may be overkill for this small example,
+    // but something like this makes the code much more readable when the
+    // number of nodes starts growing.
+    struct Evaluator<'a> {
+        graph: &'a MyGraph,
+        outputs_cache: &'a mut OutputsCache,
+        node_id: NodeId,
+    }
+    impl<'a> Evaluator<'a> {
+        fn new(graph: &'a MyGraph, outputs_cache: &'a mut OutputsCache, node_id: NodeId) -> Self {
+            Self {
+                graph,
+                outputs_cache,
+                node_id,
+            }
+        }
+        fn input_vector(&mut self, name: &str) -> anyhow::Result<egui::Vec2> {
+            evaluate_input(self.graph, self.node_id, name, self.outputs_cache)?.try_to_vec2()
+        }
+        fn input_scalar(&mut self, name: &str) -> anyhow::Result<f32> {
+            evaluate_input(self.graph, self.node_id, name, self.outputs_cache)?.try_to_scalar()
+        }
+        fn output_vector(&mut self, name: &str, value: egui::Vec2) -> anyhow::Result<MyValueType> {
+            let value = MyValueType::Vec2 { value };
+            populate_output(self.graph, self.outputs_cache, self.node_id, name, value)
+        }
+        fn output_scalar(&mut self, name: &str, value: f32) -> anyhow::Result<MyValueType> {
+            let value = MyValueType::Scalar { value };
+            populate_output(self.graph, self.outputs_cache, self.node_id, name, value)
+        }
+    }

     let node = &graph[node_id];
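
Aside (not part of the commit): the Evaluator above bundles the three pieces of evaluation context, the shared graph borrow, the mutable outputs cache, and the current node id, into one value so each helper call no longer repeats them, and its output_* methods hand back the value they cache so a match arm can end with the call. A stripped-down, runnable sketch of that pattern, with invented DemoGraph / DemoCache placeholder types (it assumes the anyhow crate, which the example already uses):

    use std::collections::HashMap;

    // Invented stand-ins for MyGraph / OutputsCache.
    struct DemoGraph {
        inputs: HashMap<String, f32>,
    }
    type DemoCache = HashMap<String, f32>;

    // Bundle the borrows once so the helpers don't repeat them at every call.
    struct DemoEvaluator<'a> {
        graph: &'a DemoGraph,
        cache: &'a mut DemoCache,
    }

    impl<'a> DemoEvaluator<'a> {
        fn input_scalar(&mut self, name: &str) -> anyhow::Result<f32> {
            self.graph
                .inputs
                .get(name)
                .copied()
                .ok_or_else(|| anyhow::anyhow!("no input named {name}"))
        }

        // Caches the value and returns it, so the call can be a tail expression.
        fn output_scalar(&mut self, name: &str, value: f32) -> anyhow::Result<f32> {
            self.cache.insert(name.to_string(), value);
            Ok(value)
        }
    }

    fn main() -> anyhow::Result<()> {
        let graph = DemoGraph {
            inputs: HashMap::from([("A".to_string(), 1.0), ("B".to_string(), 2.0)]),
        };
        let mut cache = DemoCache::new();

        let mut evaluator = DemoEvaluator { graph: &graph, cache: &mut cache };
        let a = evaluator.input_scalar("A")?;
        let b = evaluator.input_scalar("B")?;
        evaluator.output_scalar("out", a + b)?;

        assert_eq!(cache["out"], 3.0);
        Ok(())
    }
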
@@ -431,7 +435,7 @@ pub fn evaluate_node(
         MyNodeTemplate::AddScalar => {
             // Calling `evaluate_input` recursively evaluates other nodes in the
             // graph until the input value for a paramater has been computed.
-            // This first call doesn't use the `input!` macro to illustrate what
+            // This first call doesn't use the `Evaluator` to illustrate what
             // is going on underneath.
             let a = evaluate_input(graph, node_id, "A", outputs_cache)?.try_to_scalar()?;
             let b = evaluate_input(graph, node_id, "B", outputs_cache)?.try_to_scalar()?;
@@ -450,38 +454,43 @@ pub fn evaluate_node(
             // Note that this is just one possible semantic interpretation of
             // the graphs, you can come up with your own evaluation semantics!
             let out = MyValueType::Scalar { value: a + b };
-            populate_output(graph, outputs_cache, node_id, "out", out)?;
-            Ok(out)
+            populate_output(graph, outputs_cache, node_id, "out", out)
         }
         MyNodeTemplate::SubtractScalar => {
-            // Using the macros, the code gets as succint as it gets
-            let a = input!(Scalar "A");
-            let b = input!(Scalar "B");
-            output!(Scalar "out" => a - b)
+            // Using the evaluator, the code gets as succint as it gets
+            let mut evaluator = Evaluator::new(graph, outputs_cache, node_id);
+            let a = evaluator.input_scalar("A")?;
+            let b = evaluator.input_scalar("B")?;
+            evaluator.output_scalar("out", a - b)
         }
         MyNodeTemplate::VectorTimesScalar => {
-            let scalar = input!(Scalar "scalar");
-            let vector = input!(Vec2 "vector");
-            output!(Vec2 "out" => vector * scalar)
+            let mut evaluator = Evaluator::new(graph, outputs_cache, node_id);
+            let scalar = evaluator.input_scalar("scalar")?;
+            let vector = evaluator.input_vector("vector")?;
+            evaluator.output_vector("out", vector * scalar)
         }
         MyNodeTemplate::AddVector => {
-            let v1 = input!(Vec2 "v1");
-            let v2 = input!(Vec2 "v2");
-            output!(Vec2 "out" => v1 + v2)
+            let mut evaluator = Evaluator::new(graph, outputs_cache, node_id);
+            let v1 = evaluator.input_vector("v1")?;
+            let v2 = evaluator.input_vector("v2")?;
+            evaluator.output_vector("out", v1 + v2)
        }
         MyNodeTemplate::SubtractVector => {
-            let v1 = input!(Vec2 "v1");
-            let v2 = input!(Vec2 "v2");
-            output!(Vec2 "out" => v1 - v2)
+            let mut evaluator = Evaluator::new(graph, outputs_cache, node_id);
+            let v1 = evaluator.input_vector("v1")?;
+            let v2 = evaluator.input_vector("v2")?;
+            evaluator.output_vector("out", v1 - v2)
         }
         MyNodeTemplate::MakeVector => {
-            let x = input!(Scalar "x");
-            let y = input!(Scalar "y");
-            output!(Vec2 "out" => egui::vec2(x, y))
+            let mut evaluator = Evaluator::new(graph, outputs_cache, node_id);
+            let x = evaluator.input_scalar("x")?;
+            let y = evaluator.input_scalar("y")?;
+            evaluator.output_vector("out", egui::vec2(x, y))
         }
         MyNodeTemplate::MakeScalar => {
-            let value = input!(Scalar "value");
-            output!(Scalar "out" => value)
+            let mut evaluator = Evaluator::new(graph, outputs_cache, node_id);
+            let value = evaluator.input_scalar("value")?;
+            evaluator.output_scalar("out", value)
         }
     }
 }
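
Aside (not part of the commit): the comment in the AddScalar arm describes evaluate_input recursively evaluating upstream nodes until a parameter's value is available, with outputs_cache remembering results that were already computed. A self-contained sketch of that caching recursion, using an invented toy graph (NodeId as usize, a two-variant Node enum) rather than the real library types:

    use std::collections::HashMap;

    type NodeId = usize;

    // Toy stand-in for a node graph: constants and one binary operation.
    enum Node {
        Constant(f32),
        Add(NodeId, NodeId),
    }

    fn evaluate_node(
        graph: &HashMap<NodeId, Node>,
        node_id: NodeId,
        outputs_cache: &mut HashMap<NodeId, f32>,
    ) -> f32 {
        // Reuse a value that some other path through the graph already computed.
        if let Some(&cached) = outputs_cache.get(&node_id) {
            return cached;
        }
        let value = match &graph[&node_id] {
            Node::Constant(c) => *c,
            // Recursively evaluate the inputs first, then combine them.
            Node::Add(a, b) => {
                evaluate_node(graph, *a, outputs_cache) + evaluate_node(graph, *b, outputs_cache)
            }
        };
        outputs_cache.insert(node_id, value);
        value
    }

    fn main() {
        let graph = HashMap::from([
            (0, Node::Constant(1.0)),
            (1, Node::Constant(2.0)),
            (2, Node::Add(0, 1)),
        ]);
        let mut outputs_cache = HashMap::new();
        assert_eq!(evaluate_node(&graph, 2, &mut outputs_cache), 3.0);
    }
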
@@ -492,10 +501,10 @@ fn populate_output(
     node_id: NodeId,
     param_name: &str,
     value: MyValueType,
-) -> anyhow::Result<()> {
+) -> anyhow::Result<MyValueType> {
     let output_id = graph[node_id].get_output(param_name)?;
     outputs_cache.insert(output_id, value);
-    Ok(())
+    Ok(value)
 }

 // Evaluates the input value of