diff --git a/editor/Cargo.toml b/editor/Cargo.toml index 1f6af599ca..a39aa5ef2e 100644 --- a/editor/Cargo.toml +++ b/editor/Cargo.toml @@ -2,7 +2,7 @@ name = "graphite-editor" publish = false version = "0.0.0" -rust-version = "1.85" +rust-version = "1.88" authors = ["Graphite Authors "] edition = "2024" readme = "../README.md" diff --git a/editor/src/messages/portfolio/document/utility_types/network_interface.rs b/editor/src/messages/portfolio/document/utility_types/network_interface.rs index f30110e3a4..290247f621 100644 --- a/editor/src/messages/portfolio/document/utility_types/network_interface.rs +++ b/editor/src/messages/portfolio/document/utility_types/network_interface.rs @@ -6804,13 +6804,6 @@ impl From for DocumentNodePersisten } } -#[derive(serde::Serialize, serde::Deserialize)] -enum NodePersistentMetadataVersions { - DocumentNodePersistentMetadataPropertiesRow(DocumentNodePersistentMetadataPropertiesRow), - NodePersistentMetadataInputNames(DocumentNodePersistentMetadataInputNames), - NodePersistentMetadata(DocumentNodePersistentMetadata), -} - fn deserialize_node_persistent_metadata<'de, D>(deserializer: D) -> Result where D: serde::Deserializer<'de>, diff --git a/editor/src/messages/tool/common_functionality/shape_editor.rs b/editor/src/messages/tool/common_functionality/shape_editor.rs index f98e50f079..34fda01a34 100644 --- a/editor/src/messages/tool/common_functionality/shape_editor.rs +++ b/editor/src/messages/tool/common_functionality/shape_editor.rs @@ -1000,7 +1000,7 @@ impl ShapeState { } else { // Push both in and out handles into the correct position for ((handle, sign), other_anchor) in handles.iter().zip([1., -1.]).zip(&anchor_positions) { - let Some(anchor_vector) = other_anchor.map(|position| (position - anchor_position)) else { + let Some(anchor_vector) = other_anchor.map(|position| position - anchor_position) else { continue; }; diff --git a/frontend/wasm/Cargo.toml b/frontend/wasm/Cargo.toml index 2843820124..13e45bd07e 100644 --- a/frontend/wasm/Cargo.toml +++ b/frontend/wasm/Cargo.toml @@ -2,7 +2,7 @@ name = "graphite-wasm" publish = false version = "0.0.0" -rust-version = "1.85" +rust-version = "1.88" authors = ["Graphite Authors "] edition = "2024" readme = "../../README.md" @@ -13,7 +13,7 @@ license = "Apache-2.0" [features] default = ["gpu"] gpu = ["editor/gpu"] -tauri = [ "editor/tauri"] +tauri = ["editor/tauri"] [lib] crate-type = ["cdylib", "rlib"] diff --git a/libraries/bezier-rs/src/poisson_disk.rs b/libraries/bezier-rs/src/poisson_disk.rs index cd2d8d45f0..42cb904a8a 100644 --- a/libraries/bezier-rs/src/poisson_disk.rs +++ b/libraries/bezier-rs/src/poisson_disk.rs @@ -169,7 +169,7 @@ where A::Item: Clone, B::Item: Clone, { - a.flat_map(move |i| (b.clone().map(move |j| (i.clone(), j)))) + a.flat_map(move |i| b.clone().map(move |j| (i.clone(), j))) } /// A square (represented by its top left corner position and width/height of `square_size`) that is currently a candidate for targetting by the dart throwing process. diff --git a/libraries/path-bool/src/path_boolean.rs b/libraries/path-bool/src/path_boolean.rs index c426b0db88..fa858e9360 100644 --- a/libraries/path-bool/src/path_boolean.rs +++ b/libraries/path-bool/src/path_boolean.rs @@ -1086,7 +1086,7 @@ fn compute_dual(minor_graph: &MinorGraph) -> Result { let outer_face_key = if count != 1 { #[cfg(feature = "logging")] eprintln!("Found multiple outer faces: {areas:?}, falling back to area calculation"); - let (key, _) = *areas.iter().max_by_key(|(_, area)| ((area.abs() * 1000.) 
as u64)).unwrap(); + let (key, _) = *areas.iter().max_by_key(|(_, area)| (area.abs() * 1000.) as u64).unwrap(); *key } else { *windings diff --git a/node-graph/gcore/src/instances.rs b/node-graph/gcore/src/instances.rs index ca76745e03..4f0442edda 100644 --- a/node-graph/gcore/src/instances.rs +++ b/node-graph/gcore/src/instances.rs @@ -27,6 +27,24 @@ impl Instances { } } + pub fn new_instance(instance: Instance) -> Self { + Self { + instance: vec![instance.instance], + transform: vec![instance.transform], + alpha_blending: vec![instance.alpha_blending], + source_node_id: vec![instance.source_node_id], + } + } + + pub fn with_capacity(capacity: usize) -> Self { + Self { + instance: Vec::with_capacity(capacity), + transform: Vec::with_capacity(capacity), + alpha_blending: Vec::with_capacity(capacity), + source_node_id: Vec::with_capacity(capacity), + } + } + pub fn push(&mut self, instance: Instance) { self.instance.push(instance.instance); self.transform.push(instance.transform); @@ -161,6 +179,18 @@ unsafe impl StaticType for Instances { type Static = Instances; } +impl FromIterator> for Instances { + fn from_iter>>(iter: I) -> Self { + let iter = iter.into_iter(); + let (lower, _) = iter.size_hint(); + let mut instances = Self::with_capacity(lower); + for instance in iter { + instances.push(instance); + } + instances + } +} + fn one_daffine2_default() -> Vec { vec![DAffine2::IDENTITY] } diff --git a/node-graph/gcore/src/vector/algorithms/poisson_disk.rs b/node-graph/gcore/src/vector/algorithms/poisson_disk.rs index da42a486de..7a3445f10b 100644 --- a/node-graph/gcore/src/vector/algorithms/poisson_disk.rs +++ b/node-graph/gcore/src/vector/algorithms/poisson_disk.rs @@ -182,7 +182,7 @@ where A::Item: Clone, B::Item: Clone, { - a.flat_map(move |i| (b.clone().map(move |j| (i.clone(), j)))) + a.flat_map(move |i| b.clone().map(move |j| (i.clone(), j))) } /// A square (represented by its top left corner position and width/height of `square_size`) that is currently a candidate for targetting by the dart throwing process. diff --git a/node-graph/gcore/src/vector/vector_data.rs b/node-graph/gcore/src/vector/vector_data.rs index 3523784545..04a929ed23 100644 --- a/node-graph/gcore/src/vector/vector_data.rs +++ b/node-graph/gcore/src/vector/vector_data.rs @@ -337,7 +337,7 @@ impl VectorData { /// Returns the number of linear segments connected to the given point. 
pub fn connected_linear_segments(&self, point_id: PointId) -> usize { self.segment_bezier_iter() - .filter(|(_, bez, start, end)| ((*start == point_id || *end == point_id) && matches!(bez.handles, BezierHandles::Linear))) + .filter(|(_, bez, start, end)| (*start == point_id || *end == point_id) && matches!(bez.handles, BezierHandles::Linear)) .count() } diff --git a/node-graph/gcore/src/vector/vector_nodes.rs b/node-graph/gcore/src/vector/vector_nodes.rs index 93e732938b..ca2349feed 100644 --- a/node-graph/gcore/src/vector/vector_nodes.rs +++ b/node-graph/gcore/src/vector/vector_nodes.rs @@ -443,110 +443,109 @@ async fn round_corners( #[default(5.)] min_angle_threshold: Angle, ) -> VectorDataTable { - let mut result_table = VectorDataTable::default(); - - for source in source.instance_ref_iter() { - let source_transform = *source.transform; - let source_transform_inverse = source_transform.inverse(); - let source = source.instance; + source + .instance_ref_iter() + .map(|source| { + let source_transform = *source.transform; + let source_transform_inverse = source_transform.inverse(); + let source = source.instance; - let upstream_graphic_group = source.upstream_graphic_group.clone(); + let upstream_graphic_group = source.upstream_graphic_group.clone(); - // Flip the roundness to help with user intuition - let roundness = 1. - roundness; - // Convert 0-100 to 0-0.5 - let edge_length_limit = edge_length_limit * 0.005; + // Flip the roundness to help with user intuition + let roundness = 1. - roundness; + // Convert 0-100 to 0-0.5 + let edge_length_limit = edge_length_limit * 0.005; - let mut result = VectorData { - style: source.style.clone(), - ..Default::default() - }; - - // Grab the initial point ID as a stable starting point - let mut initial_point_id = source.point_domain.ids().first().copied().unwrap_or(PointId::generate()); - - for mut subpath in source.stroke_bezier_paths() { - subpath.apply_transform(source_transform); + let mut result = VectorData { + style: source.style.clone(), + ..Default::default() + }; - // End if not enough points for corner rounding - if subpath.manipulator_groups().len() < 3 { - result.append_subpath(subpath, false); - continue; - } + // Grab the initial point ID as a stable starting point + let mut initial_point_id = source.point_domain.ids().first().copied().unwrap_or(PointId::generate()); - let groups = subpath.manipulator_groups(); - let mut new_groups = Vec::new(); - let is_closed = subpath.closed(); + for mut subpath in source.stroke_bezier_paths() { + subpath.apply_transform(source_transform); - for i in 0..groups.len() { - // Skip first and last points for open paths - if !is_closed && (i == 0 || i == groups.len() - 1) { - new_groups.push(groups[i]); + // End if not enough points for corner rounding + if subpath.manipulator_groups().len() < 3 { + result.append_subpath(subpath, false); continue; } - // Not the prettiest, but it makes the rest of the logic more readable - let prev_idx = if i == 0 { if is_closed { groups.len() - 1 } else { 0 } } else { i - 1 }; - let curr_idx = i; - let next_idx = if i == groups.len() - 1 { if is_closed { 0 } else { i } } else { i + 1 }; + let groups = subpath.manipulator_groups(); + let mut new_groups = Vec::new(); + let is_closed = subpath.closed(); - let prev = groups[prev_idx].anchor; - let curr = groups[curr_idx].anchor; - let next = groups[next_idx].anchor; + for i in 0..groups.len() { + // Skip first and last points for open paths + if !is_closed && (i == 0 || i == groups.len() - 1) { + 
new_groups.push(groups[i]); + continue; + } - let dir1 = (curr - prev).normalize_or(DVec2::X); - let dir2 = (next - curr).normalize_or(DVec2::X); + // Not the prettiest, but it makes the rest of the logic more readable + let prev_idx = if i == 0 { if is_closed { groups.len() - 1 } else { 0 } } else { i - 1 }; + let curr_idx = i; + let next_idx = if i == groups.len() - 1 { if is_closed { 0 } else { i } } else { i + 1 }; - let theta = PI - dir1.angle_to(dir2).abs(); + let prev = groups[prev_idx].anchor; + let curr = groups[curr_idx].anchor; + let next = groups[next_idx].anchor; - // Skip near-straight corners - if theta > PI - min_angle_threshold.to_radians() { - new_groups.push(groups[curr_idx]); - continue; - } + let dir1 = (curr - prev).normalize_or(DVec2::X); + let dir2 = (next - curr).normalize_or(DVec2::X); - // Calculate L, with limits to avoid extreme values - let distance_along_edge = radius / (theta / 2.).sin(); - let distance_along_edge = distance_along_edge.min(edge_length_limit * (curr - prev).length().min((next - curr).length())).max(0.01); + let theta = PI - dir1.angle_to(dir2).abs(); - // Find points on each edge at distance L from corner - let p1 = curr - dir1 * distance_along_edge; - let p2 = curr + dir2 * distance_along_edge; + // Skip near-straight corners + if theta > PI - min_angle_threshold.to_radians() { + new_groups.push(groups[curr_idx]); + continue; + } - // Add first point (coming into the rounded corner) - new_groups.push(ManipulatorGroup { - anchor: p1, - in_handle: None, - out_handle: Some(curr - dir1 * distance_along_edge * roundness), - id: initial_point_id.next_id(), - }); + // Calculate L, with limits to avoid extreme values + let distance_along_edge = radius / (theta / 2.).sin(); + let distance_along_edge = distance_along_edge.min(edge_length_limit * (curr - prev).length().min((next - curr).length())).max(0.01); - // Add second point (coming out of the rounded corner) - new_groups.push(ManipulatorGroup { - anchor: p2, - in_handle: Some(curr + dir2 * distance_along_edge * roundness), - out_handle: None, - id: initial_point_id.next_id(), - }); - } + // Find points on each edge at distance L from corner + let p1 = curr - dir1 * distance_along_edge; + let p2 = curr + dir2 * distance_along_edge; - // One subpath for each shape - let mut rounded_subpath = Subpath::new(new_groups, is_closed); - rounded_subpath.apply_transform(source_transform_inverse); - result.append_subpath(rounded_subpath, false); - } + // Add first point (coming into the rounded corner) + new_groups.push(ManipulatorGroup { + anchor: p1, + in_handle: None, + out_handle: Some(curr - dir1 * distance_along_edge * roundness), + id: initial_point_id.next_id(), + }); - result.upstream_graphic_group = upstream_graphic_group; + // Add second point (coming out of the rounded corner) + new_groups.push(ManipulatorGroup { + anchor: p2, + in_handle: Some(curr + dir2 * distance_along_edge * roundness), + out_handle: None, + id: initial_point_id.next_id(), + }); + } - result_table.push(Instance { - instance: result, - transform: source_transform, - alpha_blending: Default::default(), - source_node_id: None, - }); - } + // One subpath for each shape + let mut rounded_subpath = Subpath::new(new_groups, is_closed); + rounded_subpath.apply_transform(source_transform_inverse); + result.append_subpath(rounded_subpath, false); + } - result_table + result.upstream_graphic_group = upstream_graphic_group; + + Instance { + instance: result, + transform: source_transform, + alpha_blending: Default::default(), + 
source_node_id: None, + } + }) + .collect() } #[node_macro::node(name("Merge by Distance"), category("Vector: Modifier"), path(graphene_core::vector))] @@ -558,24 +557,22 @@ pub fn merge_by_distance( distance: PixelLength, algorithm: MergeByDistanceAlgorithm, ) -> VectorDataTable { - let mut result_table = VectorDataTable::default(); - match algorithm { - MergeByDistanceAlgorithm::Spatial => { - for mut vector_data_instance in vector_data.instance_iter() { + MergeByDistanceAlgorithm::Spatial => vector_data + .instance_iter() + .map(|mut vector_data_instance| { vector_data_instance.instance.merge_by_distance_spatial(vector_data_instance.transform, distance); - result_table.push(vector_data_instance); - } - } - MergeByDistanceAlgorithm::Topological => { - for mut vector_data_instance in vector_data.instance_iter() { + vector_data_instance + }) + .collect(), + MergeByDistanceAlgorithm::Topological => vector_data + .instance_iter() + .map(|mut vector_data_instance| { vector_data_instance.instance.merge_by_distance_topological(distance); - result_table.push(vector_data_instance); - } - } + vector_data_instance + }) + .collect(), } - - result_table } #[node_macro::node(category("Vector: Modifier"), path(graphene_core::vector))] @@ -584,75 +581,74 @@ async fn box_warp(_: impl Ctx, vector_data: VectorDataTable, #[expose] rectangle return vector_data; }; - let mut result_table = VectorDataTable::default(); - - for mut vector_data_instance in vector_data.instance_iter() { - let vector_data_transform = vector_data_instance.transform; - let vector_data = vector_data_instance.instance; - - // Get the bounding box of the source vector data - let source_bbox = vector_data.bounding_box_with_transform(vector_data_transform).unwrap_or([DVec2::ZERO, DVec2::ONE]); - - // Extract first 4 points from target shape to form the quadrilateral - // Apply the target's transform to get points in world space - let target_points: Vec = target.point_domain.positions().iter().map(|&p| target_transform.transform_point2(p)).take(4).collect(); - - // If we have fewer than 4 points, use the corners of the source bounding box - // This handles the degenerative case - let dst_corners = if target_points.len() >= 4 { - [target_points[0], target_points[1], target_points[2], target_points[3]] - } else { - warn!("Target shape has fewer than 4 points. 
Using source bounding box instead."); - [ - source_bbox[0], - DVec2::new(source_bbox[1].x, source_bbox[0].y), - source_bbox[1], - DVec2::new(source_bbox[0].x, source_bbox[1].y), - ] - }; + vector_data + .instance_iter() + .map(|mut vector_data_instance| { + let vector_data_transform = vector_data_instance.transform; + let vector_data = vector_data_instance.instance; - // Apply the warp - let mut result = vector_data.clone(); + // Get the bounding box of the source vector data + let source_bbox = vector_data.bounding_box_with_transform(vector_data_transform).unwrap_or([DVec2::ZERO, DVec2::ONE]); - // Precompute source bounding box size for normalization - let source_size = source_bbox[1] - source_bbox[0]; + // Extract first 4 points from target shape to form the quadrilateral + // Apply the target's transform to get points in world space + let target_points: Vec = target.point_domain.positions().iter().map(|&p| target_transform.transform_point2(p)).take(4).collect(); - // Transform points - for (_, position) in result.point_domain.positions_mut() { - // Get the point in world space - let world_pos = vector_data_transform.transform_point2(*position); + // If we have fewer than 4 points, use the corners of the source bounding box + // This handles the degenerative case + let dst_corners = if target_points.len() >= 4 { + [target_points[0], target_points[1], target_points[2], target_points[3]] + } else { + warn!("Target shape has fewer than 4 points. Using source bounding box instead."); + [ + source_bbox[0], + DVec2::new(source_bbox[1].x, source_bbox[0].y), + source_bbox[1], + DVec2::new(source_bbox[0].x, source_bbox[1].y), + ] + }; - // Normalize coordinates within the source bounding box - let t = ((world_pos - source_bbox[0]) / source_size).clamp(DVec2::ZERO, DVec2::ONE); + // Apply the warp + let mut result = vector_data.clone(); - // Apply bilinear interpolation - *position = bilinear_interpolate(t, &dst_corners); - } + // Precompute source bounding box size for normalization + let source_size = source_bbox[1] - source_bbox[0]; - // Transform handles in bezier curves - for (_, handles, _, _) in result.handles_mut() { - *handles = handles.apply_transformation(|pos| { - // Get the handle in world space - let world_pos = vector_data_transform.transform_point2(pos); + // Transform points + for (_, position) in result.point_domain.positions_mut() { + // Get the point in world space + let world_pos = vector_data_transform.transform_point2(*position); // Normalize coordinates within the source bounding box let t = ((world_pos - source_bbox[0]) / source_size).clamp(DVec2::ZERO, DVec2::ONE); // Apply bilinear interpolation - bilinear_interpolate(t, &dst_corners) - }); - } + *position = bilinear_interpolate(t, &dst_corners); + } - result.style.set_stroke_transform(DAffine2::IDENTITY); + // Transform handles in bezier curves + for (_, handles, _, _) in result.handles_mut() { + *handles = handles.apply_transformation(|pos| { + // Get the handle in world space + let world_pos = vector_data_transform.transform_point2(pos); - // Add this to the table and reset the transform since we've applied it directly to the points - vector_data_instance.instance = result; - vector_data_instance.transform = DAffine2::IDENTITY; - vector_data_instance.source_node_id = None; - result_table.push(vector_data_instance); - } + // Normalize coordinates within the source bounding box + let t = ((world_pos - source_bbox[0]) / source_size).clamp(DVec2::ZERO, DVec2::ONE); - result_table + // Apply bilinear interpolation + 
bilinear_interpolate(t, &dst_corners) + }); + } + + result.style.set_stroke_transform(DAffine2::IDENTITY); + + // Add this to the table and reset the transform since we've applied it directly to the points + vector_data_instance.instance = result; + vector_data_instance.transform = DAffine2::IDENTITY; + vector_data_instance.source_node_id = None; + vector_data_instance + }) + .collect() } // Interpolate within a quadrilateral using normalized coordinates (0-1) @@ -679,115 +675,114 @@ async fn auto_tangents( #[default(true)] preserve_existing: bool, ) -> VectorDataTable { - let mut result_table = VectorDataTable::default(); - - for source in source.instance_ref_iter() { - let transform = *source.transform; - let alpha_blending = *source.alpha_blending; - let source_node_id = *source.source_node_id; - let source = source.instance; + source + .instance_ref_iter() + .map(|source| { + let transform = *source.transform; + let alpha_blending = *source.alpha_blending; + let source_node_id = *source.source_node_id; + let source = source.instance; + + let mut result = VectorData { + style: source.style.clone(), + ..Default::default() + }; - let mut result = VectorData { - style: source.style.clone(), - ..Default::default() - }; + for mut subpath in source.stroke_bezier_paths() { + subpath.apply_transform(transform); - for mut subpath in source.stroke_bezier_paths() { - subpath.apply_transform(transform); + let groups = subpath.manipulator_groups(); + if groups.len() < 2 { + // Not enough points for softening or handle removal + result.append_subpath(subpath, true); + continue; + } - let groups = subpath.manipulator_groups(); - if groups.len() < 2 { - // Not enough points for softening or handle removal - result.append_subpath(subpath, true); - continue; - } + let mut new_groups = Vec::with_capacity(groups.len()); + let is_closed = subpath.closed(); - let mut new_groups = Vec::with_capacity(groups.len()); - let is_closed = subpath.closed(); + for i in 0..groups.len() { + let curr = &groups[i]; - for i in 0..groups.len() { - let curr = &groups[i]; + if preserve_existing { + // Check if this point has handles that are meaningfully different from the anchor + let has_handles = (curr.in_handle.is_some() && !curr.in_handle.unwrap().abs_diff_eq(curr.anchor, 1e-5)) + || (curr.out_handle.is_some() && !curr.out_handle.unwrap().abs_diff_eq(curr.anchor, 1e-5)); - if preserve_existing { - // Check if this point has handles that are meaningfully different from the anchor - let has_handles = (curr.in_handle.is_some() && !curr.in_handle.unwrap().abs_diff_eq(curr.anchor, 1e-5)) - || (curr.out_handle.is_some() && !curr.out_handle.unwrap().abs_diff_eq(curr.anchor, 1e-5)); + // If the point already has handles, or if it's an endpoint of an open path, keep it as is. + if has_handles || (!is_closed && (i == 0 || i == groups.len() - 1)) { + new_groups.push(*curr); + continue; + } + } - // If the point already has handles, or if it's an endpoint of an open path, keep it as is. - if has_handles || (!is_closed && (i == 0 || i == groups.len() - 1)) { - new_groups.push(*curr); + // If spread is 0, remove handles for this point, making it a sharp corner. + if spread == 0. { + new_groups.push(ManipulatorGroup { + anchor: curr.anchor, + in_handle: None, + out_handle: None, + id: curr.id, + }); continue; } - } - // If spread is 0, remove handles for this point, making it a sharp corner. - if spread == 0. 
{ - new_groups.push(ManipulatorGroup { - anchor: curr.anchor, - in_handle: None, - out_handle: None, - id: curr.id, - }); - continue; - } + // Get previous and next points for auto-tangent calculation + let prev_idx = if i == 0 { if is_closed { groups.len() - 1 } else { i } } else { i - 1 }; + let next_idx = if i == groups.len() - 1 { if is_closed { 0 } else { i } } else { i + 1 }; - // Get previous and next points for auto-tangent calculation - let prev_idx = if i == 0 { if is_closed { groups.len() - 1 } else { i } } else { i - 1 }; - let next_idx = if i == groups.len() - 1 { if is_closed { 0 } else { i } } else { i + 1 }; + let prev = groups[prev_idx].anchor; + let curr_pos = curr.anchor; + let next = groups[next_idx].anchor; - let prev = groups[prev_idx].anchor; - let curr_pos = curr.anchor; - let next = groups[next_idx].anchor; + // Calculate directions from current point to adjacent points + let dir_prev = (prev - curr_pos).normalize_or_zero(); + let dir_next = (next - curr_pos).normalize_or_zero(); - // Calculate directions from current point to adjacent points - let dir_prev = (prev - curr_pos).normalize_or_zero(); - let dir_next = (next - curr_pos).normalize_or_zero(); + // Check if we have valid directions (e.g., points are not coincident) + if dir_prev.length_squared() < 1e-5 || dir_next.length_squared() < 1e-5 { + // Fallback: keep the original manipulator group (which has no active handles here) + new_groups.push(*curr); + continue; + } - // Check if we have valid directions (e.g., points are not coincident) - if dir_prev.length_squared() < 1e-5 || dir_next.length_squared() < 1e-5 { - // Fallback: keep the original manipulator group (which has no active handles here) - new_groups.push(*curr); - continue; - } + // Calculate handle direction (colinear, pointing along the line from prev to next) + // Original logic: (dir_prev - dir_next) is equivalent to (prev - curr) - (next - curr) = prev - next + // The handle_dir will be along the line connecting prev and next, or perpendicular if they are coincident. + let mut handle_dir = (dir_prev - dir_next).try_normalize().unwrap_or_else(|| dir_prev.perp()); - // Calculate handle direction (colinear, pointing along the line from prev to next) - // Original logic: (dir_prev - dir_next) is equivalent to (prev - curr) - (next - curr) = prev - next - // The handle_dir will be along the line connecting prev and next, or perpendicular if they are coincident. - let mut handle_dir = (dir_prev - dir_next).try_normalize().unwrap_or_else(|| dir_prev.perp()); + // Ensure consistent orientation of the handle_dir + // This makes the `+ handle_dir` for in_handle and `- handle_dir` for out_handle consistent + if dir_prev.dot(handle_dir) < 0. { + handle_dir = -handle_dir; + } - // Ensure consistent orientation of the handle_dir - // This makes the `+ handle_dir` for in_handle and `- handle_dir` for out_handle consistent - if dir_prev.dot(handle_dir) < 0. { - handle_dir = -handle_dir; - } + // Calculate handle lengths: 1/3 of distance to adjacent points, scaled by spread + let in_length = (curr_pos - prev).length() / 3. * spread; + let out_length = (next - curr_pos).length() / 3. * spread; - // Calculate handle lengths: 1/3 of distance to adjacent points, scaled by spread - let in_length = (curr_pos - prev).length() / 3. * spread; - let out_length = (next - curr_pos).length() / 3. 
* spread; + // Create new manipulator group with calculated auto-tangents + new_groups.push(ManipulatorGroup { + anchor: curr_pos, + in_handle: Some(curr_pos + handle_dir * in_length), + out_handle: Some(curr_pos - handle_dir * out_length), + id: curr.id, + }); + } - // Create new manipulator group with calculated auto-tangents - new_groups.push(ManipulatorGroup { - anchor: curr_pos, - in_handle: Some(curr_pos + handle_dir * in_length), - out_handle: Some(curr_pos - handle_dir * out_length), - id: curr.id, - }); + let mut softened_subpath = Subpath::new(new_groups, is_closed); + softened_subpath.apply_transform(transform.inverse()); + result.append_subpath(softened_subpath, true); } - let mut softened_subpath = Subpath::new(new_groups, is_closed); - softened_subpath.apply_transform(transform.inverse()); - result.append_subpath(softened_subpath, true); - } - - result_table.push(Instance { - instance: result, - transform, - alpha_blending, - source_node_id, - }); - } - - result_table + Instance { + instance: result, + transform, + alpha_blending, + source_node_id, + } + }) + .collect() } // TODO: Fix issues and reenable @@ -904,29 +899,28 @@ async fn auto_tangents( #[node_macro::node(category("Vector: Modifier"), path(graphene_core::vector))] async fn bounding_box(_: impl Ctx, vector_data: VectorDataTable) -> VectorDataTable { - let mut result_table = VectorDataTable::default(); - - for mut vector_data_instance in vector_data.instance_iter() { - let vector_data = vector_data_instance.instance; - - let mut result = vector_data - .bounding_box_rect() - .map(|bbox| { - let mut vector_data = VectorData::default(); - vector_data.append_bezpath(bbox.to_path(DEFAULT_ACCURACY)); - vector_data - }) - .unwrap_or_default(); - - result.style = vector_data.style.clone(); - result.style.set_stroke_transform(DAffine2::IDENTITY); + vector_data + .instance_iter() + .map(|mut vector_data_instance| { + let vector_data = vector_data_instance.instance; + + let mut result = vector_data + .bounding_box_rect() + .map(|bbox| { + let mut vector_data = VectorData::default(); + vector_data.append_bezpath(bbox.to_path(DEFAULT_ACCURACY)); + vector_data + }) + .unwrap_or_default(); - vector_data_instance.instance = result; - vector_data_instance.source_node_id = None; - result_table.push(vector_data_instance); - } + result.style = vector_data.style.clone(); + result.style.set_stroke_transform(DAffine2::IDENTITY); - result_table + vector_data_instance.instance = result; + vector_data_instance.source_node_id = None; + vector_data_instance + }) + .collect() } #[node_macro::node(category("Vector: Measure"), path(graphene_core::vector))] @@ -944,17 +938,13 @@ async fn dimensions(_: impl Ctx, vector_data: VectorDataTable) -> DVec2 { /// This is useful in conjunction with nodes that repeat it, followed by the "Points to Polyline" node to string together a path of the points. #[node_macro::node(category("Vector"), name("Coordinate to Point"), path(graphene_core::vector))] async fn position_to_point(_: impl Ctx, coordinate: DVec2) -> VectorDataTable { - let mut result_table = VectorDataTable::default(); - let mut point_domain = PointDomain::new(); point_domain.push(PointId::generate(), coordinate); - result_table.push(Instance { + VectorDataTable::new_instance(Instance { instance: VectorData { point_domain, ..Default::default() }, ..Default::default() - }); - - result_table + }) } /// Creates a polyline from a series of vector points, replacing any existing segments and regions that may already exist. 
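For readers skimming why so many of the node bodies below can simply end in `.collect()`: the `node-graph/gcore/src/instances.rs` hunk earlier in this diff adds `Instances::with_capacity` and a `FromIterator<Instance<T>>` impl, so any iterator of instances can be collected into a table while preallocating each column from the iterator's `size_hint`. The following is a minimal standalone sketch of that struct-of-arrays collect pattern; `Row` and `Table` are illustrative stand-ins invented for this sketch, not the real `Instance`/`Instances` types.

// Toy struct-of-arrays table mirroring the shape of the Instances<T> additions above.
struct Row {
    value: f64,
    label: String,
}

#[derive(Default)]
struct Table {
    values: Vec<f64>,
    labels: Vec<String>,
}

impl Table {
    fn with_capacity(capacity: usize) -> Self {
        Self {
            values: Vec::with_capacity(capacity),
            labels: Vec::with_capacity(capacity),
        }
    }

    fn push(&mut self, row: Row) {
        self.values.push(row.value);
        self.labels.push(row.label);
    }
}

impl FromIterator<Row> for Table {
    fn from_iter<I: IntoIterator<Item = Row>>(iter: I) -> Self {
        let iter = iter.into_iter();
        // Preallocate every column from the iterator's lower size bound, as the diff does.
        let (lower, _) = iter.size_hint();
        let mut table = Self::with_capacity(lower);
        for row in iter {
            table.push(row);
        }
        table
    }
}

fn main() {
    // With FromIterator in place, an imperative push loop becomes map + collect,
    // which is the shape most node functions take after this refactor.
    let table: Table = (0..3).map(|i| Row { value: i as f64 * 1.5, label: format!("row {i}") }).collect();
    assert_eq!(table.values.len(), 3);
    assert_eq!(table.labels[0], "row 0");
}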
@@ -988,102 +978,100 @@ async fn points_to_polyline(_: impl Ctx, mut points: VectorDataTable, #[default( #[node_macro::node(category("Vector: Modifier"), path(graphene_core::vector), properties("offset_path_properties"))] async fn offset_path(_: impl Ctx, vector_data: VectorDataTable, distance: f64, join: StrokeJoin, #[default(4.)] miter_limit: f64) -> VectorDataTable { - let mut result_table = VectorDataTable::default(); - - for mut vector_data_instance in vector_data.instance_iter() { - let vector_data_transform = vector_data_instance.transform; - let vector_data = vector_data_instance.instance; - - let subpaths = vector_data.stroke_bezier_paths(); - let mut result = VectorData { - style: vector_data.style.clone(), - ..Default::default() - }; - result.style.set_stroke_transform(DAffine2::IDENTITY); - - // Perform operation on all subpaths in this shape. - for mut subpath in subpaths { - subpath.apply_transform(vector_data_transform); - - // Taking the existing stroke data and passing it to Bezier-rs to generate new paths. - let mut subpath_out = offset_subpath( - &subpath, - -distance, - match join { - StrokeJoin::Miter => Join::Miter(Some(miter_limit)), - StrokeJoin::Bevel => Join::Bevel, - StrokeJoin::Round => Join::Round, - }, - ); - - subpath_out.apply_transform(vector_data_transform.inverse()); - - // One closed subpath, open path. - result.append_subpath(subpath_out, false); - } - - vector_data_instance.instance = result; - vector_data_instance.source_node_id = None; - result_table.push(vector_data_instance); - } + vector_data + .instance_iter() + .map(|mut vector_data_instance| { + let vector_data_transform = vector_data_instance.transform; + let vector_data = vector_data_instance.instance; + + let subpaths = vector_data.stroke_bezier_paths(); + let mut result = VectorData { + style: vector_data.style.clone(), + ..Default::default() + }; + result.style.set_stroke_transform(DAffine2::IDENTITY); + + // Perform operation on all subpaths in this shape. + for mut subpath in subpaths { + subpath.apply_transform(vector_data_transform); + + // Taking the existing stroke data and passing it to Bezier-rs to generate new paths. + let mut subpath_out = offset_subpath( + &subpath, + -distance, + match join { + StrokeJoin::Miter => Join::Miter(Some(miter_limit)), + StrokeJoin::Bevel => Join::Bevel, + StrokeJoin::Round => Join::Round, + }, + ); + + subpath_out.apply_transform(vector_data_transform.inverse()); + + // One closed subpath, open path. + result.append_subpath(subpath_out, false); + } - result_table + vector_data_instance.instance = result; + vector_data_instance.source_node_id = None; + vector_data_instance + }) + .collect() } #[node_macro::node(category("Vector: Modifier"), path(graphene_core::vector))] async fn solidify_stroke(_: impl Ctx, vector_data: VectorDataTable) -> VectorDataTable { - let mut result_table = VectorDataTable::default(); - - for mut vector_data_instance in vector_data.instance_iter() { - let vector_data = vector_data_instance.instance; - - let stroke = vector_data.style.stroke().clone().unwrap_or_default(); - let bezpaths = vector_data.stroke_bezpath_iter(); - let mut result = VectorData::default(); - - // Taking the existing stroke data and passing it to kurbo::stroke to generate new fill paths. 
- let join = match stroke.join { - StrokeJoin::Miter => kurbo::Join::Miter, - StrokeJoin::Bevel => kurbo::Join::Bevel, - StrokeJoin::Round => kurbo::Join::Round, - }; - let cap = match stroke.cap { - StrokeCap::Butt => kurbo::Cap::Butt, - StrokeCap::Round => kurbo::Cap::Round, - StrokeCap::Square => kurbo::Cap::Square, - }; - let dash_offset = stroke.dash_offset; - let dash_pattern = stroke.dash_lengths; - let miter_limit = stroke.join_miter_limit; - - let stroke_style = kurbo::Stroke::new(stroke.weight) - .with_caps(cap) - .with_join(join) - .with_dashes(dash_offset, dash_pattern) - .with_miter_limit(miter_limit); + vector_data + .instance_iter() + .map(|mut vector_data_instance| { + let vector_data = vector_data_instance.instance; + + let stroke = vector_data.style.stroke().clone().unwrap_or_default(); + let bezpaths = vector_data.stroke_bezpath_iter(); + let mut result = VectorData::default(); + + // Taking the existing stroke data and passing it to kurbo::stroke to generate new fill paths. + let join = match stroke.join { + StrokeJoin::Miter => kurbo::Join::Miter, + StrokeJoin::Bevel => kurbo::Join::Bevel, + StrokeJoin::Round => kurbo::Join::Round, + }; + let cap = match stroke.cap { + StrokeCap::Butt => kurbo::Cap::Butt, + StrokeCap::Round => kurbo::Cap::Round, + StrokeCap::Square => kurbo::Cap::Square, + }; + let dash_offset = stroke.dash_offset; + let dash_pattern = stroke.dash_lengths; + let miter_limit = stroke.join_miter_limit; - let stroke_options = kurbo::StrokeOpts::default(); + let stroke_style = kurbo::Stroke::new(stroke.weight) + .with_caps(cap) + .with_join(join) + .with_dashes(dash_offset, dash_pattern) + .with_miter_limit(miter_limit); - // 0.25 is balanced between performace and accuracy of the curve. - const STROKE_TOLERANCE: f64 = 0.25; + let stroke_options = kurbo::StrokeOpts::default(); - for path in bezpaths { - let solidified = kurbo::stroke(path, &stroke_style, &stroke_options, STROKE_TOLERANCE); - result.append_bezpath(solidified); - } + // 0.25 is balanced between performace and accuracy of the curve. + const STROKE_TOLERANCE: f64 = 0.25; - // We set our fill to our stroke's color, then clear our stroke. - if let Some(stroke) = vector_data.style.stroke() { - result.style.set_fill(Fill::solid_or_none(stroke.color)); - result.style.set_stroke(Stroke::default()); - } + for path in bezpaths { + let solidified = kurbo::stroke(path, &stroke_style, &stroke_options, STROKE_TOLERANCE); + result.append_bezpath(solidified); + } - vector_data_instance.instance = result; - vector_data_instance.source_node_id = None; - result_table.push(vector_data_instance); - } + // We set our fill to our stroke's color, then clear our stroke. 
+ if let Some(stroke) = vector_data.style.stroke() { + result.style.set_fill(Fill::solid_or_none(stroke.color)); + result.style.set_stroke(Stroke::default()); + } - result_table + vector_data_instance.instance = result; + vector_data_instance.source_node_id = None; + vector_data_instance + }) + .collect() } #[node_macro::node(category("Vector"), path(graphene_core::vector))] @@ -1154,60 +1142,59 @@ async fn sample_polyline( adaptive_spacing: bool, subpath_segment_lengths: Vec, ) -> VectorDataTable { - let mut result_table = VectorDataTable::default(); - - for mut vector_data_instance in vector_data.instance_iter() { - let mut result = VectorData { - point_domain: Default::default(), - segment_domain: Default::default(), - region_domain: Default::default(), - colinear_manipulators: Default::default(), - style: std::mem::take(&mut vector_data_instance.instance.style), - upstream_graphic_group: std::mem::take(&mut vector_data_instance.instance.upstream_graphic_group), - }; - // Transfer the stroke transform from the input vector data to the result. - result.style.set_stroke_transform(vector_data_instance.transform); + vector_data + .instance_iter() + .map(|mut vector_data_instance| { + let mut result = VectorData { + point_domain: Default::default(), + segment_domain: Default::default(), + region_domain: Default::default(), + colinear_manipulators: Default::default(), + style: std::mem::take(&mut vector_data_instance.instance.style), + upstream_graphic_group: std::mem::take(&mut vector_data_instance.instance.upstream_graphic_group), + }; + // Transfer the stroke transform from the input vector data to the result. + result.style.set_stroke_transform(vector_data_instance.transform); - // Using `stroke_bezpath_iter` so that the `subpath_segment_lengths` is aligned to the segments of each bezpath. - // So we can index into `subpath_segment_lengths` to get the length of the segments. - // NOTE: `subpath_segment_lengths` has precalulated lengths with transformation applied. - let bezpaths = vector_data_instance.instance.stroke_bezpath_iter(); + // Using `stroke_bezpath_iter` so that the `subpath_segment_lengths` is aligned to the segments of each bezpath. + // So we can index into `subpath_segment_lengths` to get the length of the segments. + // NOTE: `subpath_segment_lengths` has precalulated lengths with transformation applied. + let bezpaths = vector_data_instance.instance.stroke_bezpath_iter(); - // Keeps track of the index of the first segment of the next bezpath in order to get lengths of all segments. - let mut next_segment_index = 0; + // Keeps track of the index of the first segment of the next bezpath in order to get lengths of all segments. + let mut next_segment_index = 0; - for mut bezpath in bezpaths { - // Apply the tranformation to the current bezpath to calculate points after transformation. - bezpath.apply_affine(Affine::new(vector_data_instance.transform.to_cols_array())); + for mut bezpath in bezpaths { + // Apply the tranformation to the current bezpath to calculate points after transformation. + bezpath.apply_affine(Affine::new(vector_data_instance.transform.to_cols_array())); - let segment_count = bezpath.segments().count(); + let segment_count = bezpath.segments().count(); - // For the current bezpath we get its segment's length by calculating the start index and end index. 
- let current_bezpath_segments_length = &subpath_segment_lengths[next_segment_index..next_segment_index + segment_count]; + // For the current bezpath we get its segment's length by calculating the start index and end index. + let current_bezpath_segments_length = &subpath_segment_lengths[next_segment_index..next_segment_index + segment_count]; - // Increment the segment index by the number of segments in the current bezpath to calculate the next bezpath segment's length. - next_segment_index += segment_count; + // Increment the segment index by the number of segments in the current bezpath to calculate the next bezpath segment's length. + next_segment_index += segment_count; - let amount = match spacing { - PointSpacingType::Separation => separation, - PointSpacingType::Quantity => quantity as f64, - }; - let Some(mut sample_bezpath) = sample_polyline_on_bezpath(bezpath, spacing, amount, start_offset, stop_offset, adaptive_spacing, current_bezpath_segments_length) else { - continue; - }; - - // Reverse the transformation applied to the bezpath as the `result` already has the transformation set. - sample_bezpath.apply_affine(Affine::new(vector_data_instance.transform.to_cols_array()).inverse()); + let amount = match spacing { + PointSpacingType::Separation => separation, + PointSpacingType::Quantity => quantity as f64, + }; + let Some(mut sample_bezpath) = sample_polyline_on_bezpath(bezpath, spacing, amount, start_offset, stop_offset, adaptive_spacing, current_bezpath_segments_length) else { + continue; + }; - // Append the bezpath (subpath) that connects generated points by lines. - result.append_bezpath(sample_bezpath); - } + // Reverse the transformation applied to the bezpath as the `result` already has the transformation set. + sample_bezpath.apply_affine(Affine::new(vector_data_instance.transform.to_cols_array()).inverse()); - vector_data_instance.instance = result; - result_table.push(vector_data_instance); - } + // Append the bezpath (subpath) that connects generated points by lines. + result.append_bezpath(sample_bezpath); + } - result_table + vector_data_instance.instance = result; + vector_data_instance + }) + .collect() } /// Splits a path at a given progress from 0 to 1 along the path, creating two new subpaths from the original one (if the path is initially open) or one open subpath (if the path is initially closed). 
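One behavioral detail to watch in the loop-to-iterator rewrites that follow: an early `continue` that dropped an instance from the output entirely (as in the `Spline` hunk further below, which skips instances with an empty point domain) becomes `filter_map`, while a `continue` that only skipped inner work but still pushed the instance stays inside the `map` closure. A small equivalence sketch with toy integer data, not the real vector types:

fn main() {
    let inputs = vec![0, 3, 5, 0, 7];

    // Old style: imperative loop where `continue` drops the element from the output.
    let mut filtered_loop = Vec::new();
    for n in &inputs {
        if *n == 0 {
            continue; // nothing is pushed for this element
        }
        filtered_loop.push(n * 2);
    }

    // New style: filter_map keeps the same drop semantics. Returning None plays the
    // role of `continue`, and Some(..) plays the role of `push`.
    let filtered_iter: Vec<i32> = inputs
        .iter()
        .filter_map(|n| if *n == 0 { None } else { Some(n * 2) })
        .collect();
    assert_eq!(filtered_loop, filtered_iter);

    // A transform that never drops elements maps one-to-one onto map + collect.
    let doubled: Vec<i32> = inputs.iter().map(|n| n * 2).collect();
    assert_eq!(doubled.len(), inputs.len());
}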
@@ -1404,42 +1391,40 @@ async fn poisson_disk_points( ) -> VectorDataTable { let mut rng = rand::rngs::StdRng::seed_from_u64(seed.into()); - let mut result_table = VectorDataTable::default(); - - for mut vector_data_instance in vector_data.instance_iter() { - let mut result = VectorData::default(); + vector_data + .instance_iter() + .map(|mut vector_data_instance| { + let mut result = VectorData::default(); - let path_with_bounding_boxes: Vec<_> = vector_data_instance - .instance - .stroke_bezpath_iter() - .map(|mut bezpath| { - // TODO: apply transform to points instead of modifying the paths - bezpath.close_path(); - let bbox = bezpath.bounding_box(); - (bezpath, bbox) - }) - .collect(); + let path_with_bounding_boxes: Vec<_> = vector_data_instance + .instance + .stroke_bezpath_iter() + .map(|mut bezpath| { + // TODO: apply transform to points instead of modifying the paths + bezpath.close_path(); + let bbox = bezpath.bounding_box(); + (bezpath, bbox) + }) + .collect(); - for (i, (subpath, _)) in path_with_bounding_boxes.iter().enumerate() { - if subpath.segments().count() < 2 { - continue; - } + for (i, (subpath, _)) in path_with_bounding_boxes.iter().enumerate() { + if subpath.segments().count() < 2 { + continue; + } - for point in bezpath_algorithms::poisson_disk_points(i, &path_with_bounding_boxes, separation_disk_diameter, || rng.random::()) { - result.point_domain.push(PointId::generate(), point); + for point in bezpath_algorithms::poisson_disk_points(i, &path_with_bounding_boxes, separation_disk_diameter, || rng.random::()) { + result.point_domain.push(PointId::generate(), point); + } } - } - - // Transfer the style from the input vector data to the result. - result.style = vector_data_instance.instance.style.clone(); - result.style.set_stroke_transform(DAffine2::IDENTITY); - vector_data_instance.instance = result; - - result_table.push(vector_data_instance); - } + // Transfer the style from the input vector data to the result. + result.style = vector_data_instance.instance.style.clone(); + result.style.set_stroke_transform(DAffine2::IDENTITY); - result_table + vector_data_instance.instance = result; + vector_data_instance + }) + .collect() } #[node_macro::node(category(""), path(graphene_core::vector))] @@ -1462,48 +1447,47 @@ async fn subpath_segment_lengths(_: impl Ctx, vector_data: VectorDataTable) -> V #[node_macro::node(name("Spline"), category("Vector: Modifier"), path(graphene_core::vector))] async fn spline(_: impl Ctx, vector_data: VectorDataTable) -> VectorDataTable { - let mut result_table = VectorDataTable::default(); - - for mut vector_data_instance in vector_data.instance_iter() { - // Exit early if there are no points to generate splines from. - if vector_data_instance.instance.point_domain.positions().is_empty() { - continue; - } + vector_data + .instance_iter() + .filter_map(|mut vector_data_instance| { + // Exit early if there are no points to generate splines from. 
+ if vector_data_instance.instance.point_domain.positions().is_empty() { + return None; + } - let mut segment_domain = SegmentDomain::default(); - for (manipulator_groups, closed) in vector_data_instance.instance.stroke_manipulator_groups() { - let positions = manipulator_groups.iter().map(|group| group.anchor).collect::>(); - let closed = closed && positions.len() > 2; + let mut segment_domain = SegmentDomain::default(); + for (manipulator_groups, closed) in vector_data_instance.instance.stroke_manipulator_groups() { + let positions = manipulator_groups.iter().map(|group| group.anchor).collect::>(); + let closed = closed && positions.len() > 2; - // Compute control point handles for Bezier spline. - let first_handles = if closed { - solve_spline_first_handle_closed(&positions) - } else { - solve_spline_first_handle_open(&positions) - }; + // Compute control point handles for Bezier spline. + let first_handles = if closed { + solve_spline_first_handle_closed(&positions) + } else { + solve_spline_first_handle_open(&positions) + }; - let stroke_id = StrokeId::ZERO; + let stroke_id = StrokeId::ZERO; - // Create segments with computed Bezier handles and add them to vector data. - for i in 0..(positions.len() - if closed { 0 } else { 1 }) { - let next_index = (i + 1) % positions.len(); + // Create segments with computed Bezier handles and add them to vector data. + for i in 0..(positions.len() - if closed { 0 } else { 1 }) { + let next_index = (i + 1) % positions.len(); - let start_index = vector_data_instance.instance.point_domain.resolve_id(manipulator_groups[i].id).unwrap(); - let end_index = vector_data_instance.instance.point_domain.resolve_id(manipulator_groups[next_index].id).unwrap(); + let start_index = vector_data_instance.instance.point_domain.resolve_id(manipulator_groups[i].id).unwrap(); + let end_index = vector_data_instance.instance.point_domain.resolve_id(manipulator_groups[next_index].id).unwrap(); - let handle_start = first_handles[i]; - let handle_end = positions[next_index] * 2. - first_handles[next_index]; - let handles = bezier_rs::BezierHandles::Cubic { handle_start, handle_end }; + let handle_start = first_handles[i]; + let handle_end = positions[next_index] * 2. 
- first_handles[next_index]; + let handles = bezier_rs::BezierHandles::Cubic { handle_start, handle_end }; - segment_domain.push(SegmentId::generate(), start_index, end_index, handles, stroke_id); + segment_domain.push(SegmentId::generate(), start_index, end_index, handles, stroke_id); + } } - } - - vector_data_instance.instance.segment_domain = segment_domain; - result_table.push(vector_data_instance); - } - result_table + vector_data_instance.instance.segment_domain = segment_domain; + Some(vector_data_instance) + }) + .collect() } #[node_macro::node(category("Vector: Modifier"), path(graphene_core::vector))] @@ -1515,55 +1499,58 @@ async fn jitter_points( amount: f64, seed: SeedValue, ) -> VectorDataTable { - let mut result_table = VectorDataTable::default(); - - for mut vector_data_instance in vector_data.instance_iter() { - let mut rng = rand::rngs::StdRng::seed_from_u64(seed.into()); - - let vector_data_transform = vector_data_instance.transform; - let inverse_transform = (vector_data_transform.matrix2.determinant() != 0.).then(|| vector_data_transform.inverse()).unwrap_or_default(); + vector_data + .instance_iter() + .map(|mut vector_data_instance| { + let mut rng = rand::rngs::StdRng::seed_from_u64(seed.into()); - let deltas = (0..vector_data_instance.instance.point_domain.positions().len()) - .map(|_| { - let angle = rng.random::() * TAU; + let vector_data_transform = vector_data_instance.transform; + let inverse_transform = if vector_data_transform.matrix2.determinant() != 0. { + vector_data_transform.inverse() + } else { + Default::default() + }; - inverse_transform.transform_vector2(DVec2::from_angle(angle) * rng.random::() * amount) - }) - .collect::>(); - let mut already_applied = vec![false; vector_data_instance.instance.point_domain.positions().len()]; + let deltas = (0..vector_data_instance.instance.point_domain.positions().len()) + .map(|_| { + let angle = rng.random::() * TAU; - for (handles, start, end) in vector_data_instance.instance.segment_domain.handles_and_points_mut() { - let start_delta = deltas[*start]; - let end_delta = deltas[*end]; + inverse_transform.transform_vector2(DVec2::from_angle(angle) * rng.random::() * amount) + }) + .collect::>(); + let mut already_applied = vec![false; vector_data_instance.instance.point_domain.positions().len()]; - if !already_applied[*start] { - let start_position = vector_data_instance.instance.point_domain.positions()[*start]; - vector_data_instance.instance.point_domain.set_position(*start, start_position + start_delta); - already_applied[*start] = true; - } - if !already_applied[*end] { - let end_position = vector_data_instance.instance.point_domain.positions()[*end]; - vector_data_instance.instance.point_domain.set_position(*end, end_position + end_delta); - already_applied[*end] = true; - } + for (handles, start, end) in vector_data_instance.instance.segment_domain.handles_and_points_mut() { + let start_delta = deltas[*start]; + let end_delta = deltas[*end]; - match handles { - bezier_rs::BezierHandles::Cubic { handle_start, handle_end } => { - *handle_start += start_delta; - *handle_end += end_delta; + if !already_applied[*start] { + let start_position = vector_data_instance.instance.point_domain.positions()[*start]; + vector_data_instance.instance.point_domain.set_position(*start, start_position + start_delta); + already_applied[*start] = true; } - bezier_rs::BezierHandles::Quadratic { handle } => { - *handle = vector_data_instance.transform.transform_point2(*handle) + (start_delta + end_delta) / 2.; + if 
!already_applied[*end] { + let end_position = vector_data_instance.instance.point_domain.positions()[*end]; + vector_data_instance.instance.point_domain.set_position(*end, end_position + end_delta); + already_applied[*end] = true; } - bezier_rs::BezierHandles::Linear => {} - } - } - vector_data_instance.instance.style.set_stroke_transform(DAffine2::IDENTITY); - result_table.push(vector_data_instance); - } + match handles { + bezier_rs::BezierHandles::Cubic { handle_start, handle_end } => { + *handle_start += start_delta; + *handle_end += end_delta; + } + bezier_rs::BezierHandles::Quadratic { handle } => { + *handle = vector_data_instance.transform.transform_point2(*handle) + (start_delta + end_delta) / 2.; + } + bezier_rs::BezierHandles::Linear => {} + } + } - result_table + vector_data_instance.instance.style.set_stroke_transform(DAffine2::IDENTITY); + vector_data_instance + }) + .collect() } #[node_macro::node(category("Vector: Modifier"), path(graphene_core::vector))] @@ -1613,125 +1600,125 @@ async fn morph(_: impl Ctx, source: VectorDataTable, #[expose] target: VectorDat let time = time.clamp(0., 1.); - let mut result_table = VectorDataTable::default(); - - for (source_instance, target_instance) in source.instance_iter().zip(target.instance_iter()) { - let mut vector_data_instance = VectorData::default(); + source + .instance_iter() + .zip(target.instance_iter()) + .map(|(source_instance, target_instance)| { + let mut vector_data_instance = VectorData::default(); - // Lerp styles - let vector_data_alpha_blending = source_instance.alpha_blending.lerp(&target_instance.alpha_blending, time as f32); - vector_data_instance.style = source_instance.instance.style.lerp(&target_instance.instance.style, time); + // Lerp styles + let vector_data_alpha_blending = source_instance.alpha_blending.lerp(&target_instance.alpha_blending, time as f32); + vector_data_instance.style = source_instance.instance.style.lerp(&target_instance.instance.style, time); - // Before and after transforms - let source_transform = source_instance.transform; - let target_transform = target_instance.transform; + // Before and after transforms + let source_transform = source_instance.transform; + let target_transform = target_instance.transform; - // Before and after paths - let source_bezpaths = source_instance.instance.stroke_bezpath_iter(); - let target_bezpaths = target_instance.instance.stroke_bezpath_iter(); + // Before and after paths + let source_bezpaths = source_instance.instance.stroke_bezpath_iter(); + let target_bezpaths = target_instance.instance.stroke_bezpath_iter(); - for (mut source_bezpath, mut target_bezpath) in source_bezpaths.zip(target_bezpaths) { - if source_bezpath.elements().is_empty() || target_bezpath.elements().is_empty() { - continue; - } + for (mut source_bezpath, mut target_bezpath) in source_bezpaths.zip(target_bezpaths) { + if source_bezpath.elements().is_empty() || target_bezpath.elements().is_empty() { + continue; + } - source_bezpath.apply_affine(Affine::new(source_transform.to_cols_array())); - target_bezpath.apply_affine(Affine::new(target_transform.to_cols_array())); - - let target_segment_len = target_bezpath.segments().count(); - let source_segment_len = source_bezpath.segments().count(); - - // Insert new segments to align the number of segments in sorce_bezpath and target_bezpath. 
- make_new_segments(&mut source_bezpath, target_segment_len.max(source_segment_len) - source_segment_len); - make_new_segments(&mut target_bezpath, source_segment_len.max(target_segment_len) - target_segment_len); - - let source_segments = source_bezpath.segments().collect::>(); - let target_segments = target_bezpath.segments().collect::>(); - - // Interpolate anchors and handles - for (i, (source_element, target_element)) in source_bezpath.elements_mut().iter_mut().zip(target_bezpath.elements_mut().iter_mut()).enumerate() { - match source_element { - PathEl::MoveTo(point) => *point = point.lerp(target_element.end_point().unwrap(), time), - PathEl::ClosePath => {} - elm => { - let mut source_segment = source_segments.get(i - 1).unwrap().to_cubic(); - let target_segment = target_segments.get(i - 1).unwrap().to_cubic(); - source_segment.p0 = source_segment.p0.lerp(target_segment.p0, time); - source_segment.p1 = source_segment.p1.lerp(target_segment.p1, time); - source_segment.p2 = source_segment.p2.lerp(target_segment.p2, time); - source_segment.p3 = source_segment.p3.lerp(target_segment.p3, time); - *elm = PathSeg::Cubic(source_segment).as_path_el(); + source_bezpath.apply_affine(Affine::new(source_transform.to_cols_array())); + target_bezpath.apply_affine(Affine::new(target_transform.to_cols_array())); + + let target_segment_len = target_bezpath.segments().count(); + let source_segment_len = source_bezpath.segments().count(); + + // Insert new segments to align the number of segments in sorce_bezpath and target_bezpath. + make_new_segments(&mut source_bezpath, target_segment_len.max(source_segment_len) - source_segment_len); + make_new_segments(&mut target_bezpath, source_segment_len.max(target_segment_len) - target_segment_len); + + let source_segments = source_bezpath.segments().collect::>(); + let target_segments = target_bezpath.segments().collect::>(); + + // Interpolate anchors and handles + for (i, (source_element, target_element)) in source_bezpath.elements_mut().iter_mut().zip(target_bezpath.elements_mut().iter_mut()).enumerate() { + match source_element { + PathEl::MoveTo(point) => *point = point.lerp(target_element.end_point().unwrap(), time), + PathEl::ClosePath => {} + elm => { + let mut source_segment = source_segments.get(i - 1).unwrap().to_cubic(); + let target_segment = target_segments.get(i - 1).unwrap().to_cubic(); + source_segment.p0 = source_segment.p0.lerp(target_segment.p0, time); + source_segment.p1 = source_segment.p1.lerp(target_segment.p1, time); + source_segment.p2 = source_segment.p2.lerp(target_segment.p2, time); + source_segment.p3 = source_segment.p3.lerp(target_segment.p3, time); + *elm = PathSeg::Cubic(source_segment).as_path_el(); + } } } - } - vector_data_instance.append_bezpath(source_bezpath.clone()); - } + vector_data_instance.append_bezpath(source_bezpath.clone()); + } - // Deal with unmatched extra paths by collapsing them - let source_paths_count = source_instance.instance.stroke_bezpath_iter().count(); - let target_paths_count = target_instance.instance.stroke_bezpath_iter().count(); - let source_paths = source_instance.instance.stroke_bezpath_iter().skip(target_paths_count); - let target_paths = target_instance.instance.stroke_bezpath_iter().skip(source_paths_count); - - for mut source_path in source_paths { - source_path.apply_affine(Affine::new(source_transform.to_cols_array())); - - // Skip if the path has no segments else get the point at the end of the path. 
- let Some(end) = source_path.segments().last().map(|element| element.end()) else { continue }; - - for element in source_path.elements_mut() { - match element { - PathEl::MoveTo(point) => *point = point.lerp(end, time), - PathEl::LineTo(point) => *point = point.lerp(end, time), - PathEl::QuadTo(point, point1) => { - *point = point.lerp(end, time); - *point1 = point1.lerp(end, time); - } - PathEl::CurveTo(point, point1, point2) => { - *point = point.lerp(end, time); - *point1 = point1.lerp(end, time); - *point2 = point2.lerp(end, time); + // Deal with unmatched extra paths by collapsing them + let source_paths_count = source_instance.instance.stroke_bezpath_iter().count(); + let target_paths_count = target_instance.instance.stroke_bezpath_iter().count(); + let source_paths = source_instance.instance.stroke_bezpath_iter().skip(target_paths_count); + let target_paths = target_instance.instance.stroke_bezpath_iter().skip(source_paths_count); + + for mut source_path in source_paths { + source_path.apply_affine(Affine::new(source_transform.to_cols_array())); + + // Skip if the path has no segments else get the point at the end of the path. + let Some(end) = source_path.segments().last().map(|element| element.end()) else { continue }; + + for element in source_path.elements_mut() { + match element { + PathEl::MoveTo(point) => *point = point.lerp(end, time), + PathEl::LineTo(point) => *point = point.lerp(end, time), + PathEl::QuadTo(point, point1) => { + *point = point.lerp(end, time); + *point1 = point1.lerp(end, time); + } + PathEl::CurveTo(point, point1, point2) => { + *point = point.lerp(end, time); + *point1 = point1.lerp(end, time); + *point2 = point2.lerp(end, time); + } + PathEl::ClosePath => {} } - PathEl::ClosePath => {} } + vector_data_instance.append_bezpath(source_path); } - vector_data_instance.append_bezpath(source_path); - } - for mut target_path in target_paths { - target_path.apply_affine(Affine::new(source_transform.to_cols_array())); + for mut target_path in target_paths { + target_path.apply_affine(Affine::new(source_transform.to_cols_array())); - // Skip if the path has no segments else get the point at the start of the path. - let Some(start) = target_path.segments().next().map(|element| element.start()) else { continue }; + // Skip if the path has no segments else get the point at the start of the path. 
+ let Some(start) = target_path.segments().next().map(|element| element.start()) else { continue }; - for element in target_path.elements_mut() { - match element { - PathEl::MoveTo(point) => *point = start.lerp(*point, time), - PathEl::LineTo(point) => *point = start.lerp(*point, time), - PathEl::QuadTo(point, point1) => { - *point = start.lerp(*point, time); - *point1 = start.lerp(*point1, time); - } - PathEl::CurveTo(point, point1, point2) => { - *point = start.lerp(*point, time); - *point1 = start.lerp(*point1, time); - *point2 = start.lerp(*point2, time); + for element in target_path.elements_mut() { + match element { + PathEl::MoveTo(point) => *point = start.lerp(*point, time), + PathEl::LineTo(point) => *point = start.lerp(*point, time), + PathEl::QuadTo(point, point1) => { + *point = start.lerp(*point, time); + *point1 = start.lerp(*point1, time); + } + PathEl::CurveTo(point, point1, point2) => { + *point = start.lerp(*point, time); + *point1 = start.lerp(*point1, time); + *point2 = start.lerp(*point2, time); + } + PathEl::ClosePath => {} } - PathEl::ClosePath => {} } + vector_data_instance.append_bezpath(target_path); } - vector_data_instance.append_bezpath(target_path); - } - - result_table.push(Instance { - instance: vector_data_instance, - alpha_blending: vector_data_alpha_blending, - ..Default::default() - }); - } - result_table + Instance { + instance: vector_data_instance, + alpha_blending: vector_data_alpha_blending, + ..Default::default() + } + }) + .collect() } fn bevel_algorithm(mut vector_data: VectorData, vector_data_transform: DAffine2, distance: f64) -> VectorData { @@ -1928,7 +1915,11 @@ fn bevel_algorithm(mut vector_data: VectorData, vector_data_transform: DAffine2, next_bezier = handles_to_segment(start, BezierHandles::Linear, end); } - let inverse_transform = (vector_data_transform.matrix2.determinant() != 0.).then(|| vector_data_transform.inverse()).unwrap_or_default(); + let inverse_transform = if vector_data_transform.matrix2.determinant() != 0. 
{ + vector_data_transform.inverse() + } else { + Default::default() + }; if index == 0 && next_index == 1 { first_original_length = bezier.perimeter(DEFAULT_ACCURACY); @@ -2010,28 +2001,24 @@ fn bevel_algorithm(mut vector_data: VectorData, vector_data_transform: DAffine2, #[node_macro::node(category("Vector: Modifier"), path(graphene_core::vector))] fn bevel(_: impl Ctx, source: VectorDataTable, #[default(10.)] distance: Length) -> VectorDataTable { - let mut result_table = VectorDataTable::default(); - - for source_instance in source.instance_iter() { - result_table.push(Instance { + source + .instance_iter() + .map(|source_instance| Instance { instance: bevel_algorithm(source_instance.instance, source_instance.transform, distance), ..source_instance - }); - } - - result_table + }) + .collect() } #[node_macro::node(category("Vector: Modifier"), path(graphene_core::vector))] fn close_path(_: impl Ctx, source: VectorDataTable) -> VectorDataTable { - let mut result_table = VectorDataTable::default(); - - for mut source_instance in source.instance_iter() { - source_instance.instance.close_subpaths(); - result_table.push(source_instance); - } - - result_table + source + .instance_iter() + .map(|mut source_instance| { + source_instance.instance.close_subpaths(); + source_instance + }) + .collect() } #[node_macro::node(category("Vector: Measure"), path(graphene_core::vector))] @@ -2162,14 +2149,6 @@ mod test { } } - fn vector_node_from_instances(data: Vec>) -> VectorDataTable { - let mut vector_data_table = VectorDataTable::default(); - for instance in data { - vector_data_table.push(instance); - } - vector_data_table - } - #[tokio::test] async fn repeat() { let direction = DVec2::X * 1.5; @@ -2306,9 +2285,9 @@ mod test { let bezpath = Rect::new(100., 100., 201., 201.).to_path(DEFAULT_ACCURACY); let transform = DAffine2::from_scale(DVec2::new(2., 2.)); let instance = create_vector_data_instance(bezpath, transform); - let instances = (0..5).map(|_| instance.clone()).collect::>>(); + let instances = (0..5).map(|_| instance.clone()).collect::>(); - let length = super::path_length(Footprint::default(), vector_node_from_instances(instances)).await; + let length = super::path_length(Footprint::default(), instances).await; // 101 (each rectangle edge length) * 4 (rectangle perimeter) * 2 (scale) * 5 (number of rows) assert_eq!(length, 101. * 4. * 2. 
* 5.); diff --git a/node-graph/gpath-bool/src/lib.rs b/node-graph/gpath-bool/src/lib.rs index e94963298b..5a1b08b630 100644 --- a/node-graph/gpath-bool/src/lib.rs +++ b/node-graph/gpath-bool/src/lib.rs @@ -80,8 +80,7 @@ fn union<'a>(vector_data: impl DoubleEndedIterator(vector_data: impl DoubleEndedIterator(vector_data: impl Iterator>) -> VectorDataTable { let mut vector_data = vector_data.into_iter(); - let mut result_vector_data_table = VectorDataTable::default(); - result_vector_data_table.push(vector_data.next().map(|x| x.to_instance_cloned()).unwrap_or_default()); + let mut result_vector_data_table = VectorDataTable::new_instance(vector_data.next().map(|x| x.to_instance_cloned()).unwrap_or_default()); let mut first_instance = result_vector_data_table.instance_mut_iter().next().expect("Expected the one instance we just pushed"); let mut next_vector_data = vector_data.next(); @@ -145,8 +143,7 @@ fn subtract<'a>(vector_data: impl Iterator>) fn intersect<'a>(vector_data: impl DoubleEndedIterator>) -> VectorDataTable { let mut vector_data = vector_data.rev(); - let mut result_vector_data_table = VectorDataTable::default(); - result_vector_data_table.push(vector_data.next().map(|x| x.to_instance_cloned()).unwrap_or_default()); + let mut result_vector_data_table = VectorDataTable::new_instance(vector_data.next().map(|x| x.to_instance_cloned()).unwrap_or_default()); let mut first_instance = result_vector_data_table.instance_mut_iter().next().expect("Expected the one instance we just pushed"); let default = Instance::default(); @@ -225,71 +222,67 @@ fn difference<'a>(vector_data: impl DoubleEndedIterator VectorDataTable { - let mut result_table = VectorDataTable::default(); + graphic_group_table + .instance_ref_iter() + .flat_map(|element| { + match element.instance.clone() { + GraphicElement::VectorData(vector_data) => { + // Apply the parent group's transform to each element of vector data + vector_data + .instance_iter() + .map(|mut sub_vector_data| { + sub_vector_data.transform = *element.transform * sub_vector_data.transform; + + sub_vector_data + }) + .collect::>() + } + GraphicElement::RasterDataCPU(image) => { + let make_instance = |transform| { + // Convert the image frame into a rectangular subpath with the image's transform + let mut subpath = Subpath::new_rect(DVec2::ZERO, DVec2::ONE); + subpath.apply_transform(transform); - for element in graphic_group_table.instance_ref_iter() { - match element.instance.clone() { - GraphicElement::VectorData(vector_data) => { - // Apply the parent group's transform to each element of vector data - for mut sub_vector_data in vector_data.instance_iter() { - sub_vector_data.transform = *element.transform * sub_vector_data.transform; + // Create a vector data table row from the rectangular subpath, with a default black fill + let mut instance = VectorData::from_subpath(subpath); + instance.style.set_fill(Fill::Solid(Color::BLACK)); - result_table.push(sub_vector_data); - } - } - GraphicElement::RasterDataCPU(image) => { - let make_instance = |transform| { - // Convert the image frame into a rectangular subpath with the image's transform - let mut subpath = Subpath::new_rect(DVec2::ZERO, DVec2::ONE); - subpath.apply_transform(transform); - - // Create a vector data table row from the rectangular subpath, with a default black fill - let mut instance = VectorData::from_subpath(subpath); - instance.style.set_fill(Fill::Solid(Color::BLACK)); - - Instance { instance, ..Default::default() } - }; - - // Apply the parent group's transform to each element of 
raster data - for instance in image.instance_ref_iter() { - result_table.push(make_instance(*element.transform * *instance.transform)); - } - } - GraphicElement::RasterDataGPU(image) => { - let make_instance = |transform| { - // Convert the image frame into a rectangular subpath with the image's transform - let mut subpath = Subpath::new_rect(DVec2::ZERO, DVec2::ONE); - subpath.apply_transform(transform); - - // Create a vector data table row from the rectangular subpath, with a default black fill - let mut instance = VectorData::from_subpath(subpath); - instance.style.set_fill(Fill::Solid(Color::BLACK)); - - Instance { instance, ..Default::default() } - }; - - // Apply the parent group's transform to each element of raster data - for instance in image.instance_ref_iter() { - result_table.push(make_instance(*element.transform * *instance.transform)); + Instance { instance, ..Default::default() } + }; + + // Apply the parent group's transform to each element of raster data + image.instance_ref_iter().map(|instance| make_instance(*element.transform * *instance.transform)).collect::>() } - } - GraphicElement::GraphicGroup(mut graphic_group) => { - // Apply the parent group's transform to each element of inner group - for sub_element in graphic_group.instance_mut_iter() { - *sub_element.transform = *element.transform * *sub_element.transform; + GraphicElement::RasterDataGPU(image) => { + let make_instance = |transform| { + // Convert the image frame into a rectangular subpath with the image's transform + let mut subpath = Subpath::new_rect(DVec2::ZERO, DVec2::ONE); + subpath.apply_transform(transform); + + // Create a vector data table row from the rectangular subpath, with a default black fill + let mut instance = VectorData::from_subpath(subpath); + instance.style.set_fill(Fill::Solid(Color::BLACK)); + + Instance { instance, ..Default::default() } + }; + + // Apply the parent group's transform to each element of raster data + image.instance_ref_iter().map(|instance| make_instance(*element.transform * *instance.transform)).collect::>() } + GraphicElement::GraphicGroup(mut graphic_group) => { + // Apply the parent group's transform to each element of inner group + for sub_element in graphic_group.instance_mut_iter() { + *sub_element.transform = *element.transform * *sub_element.transform; + } - // Recursively flatten the inner group into vector data - let unioned = boolean_operation_on_vector_data_table(flatten_vector_data(&graphic_group).instance_ref_iter(), BooleanOperation::Union); + // Recursively flatten the inner group into vector data + let unioned = boolean_operation_on_vector_data_table(flatten_vector_data(&graphic_group).instance_ref_iter(), BooleanOperation::Union); - for element in unioned.instance_iter() { - result_table.push(element); + unioned.instance_iter().collect::>() } } - } - } - - result_table + }) + .collect() } fn to_path(vector: &VectorData, transform: DAffine2) -> Vec { diff --git a/node-graph/graster-nodes/src/dehaze.rs b/node-graph/graster-nodes/src/dehaze.rs index 268ccee09d..6c6246b964 100644 --- a/node-graph/graster-nodes/src/dehaze.rs +++ b/node-graph/graster-nodes/src/dehaze.rs @@ -8,34 +8,33 @@ use std::cmp::{max, min}; #[node_macro::node(category("Raster: Filter"))] async fn dehaze(_: impl Ctx, image_frame: RasterDataTable, strength: Percentage) -> RasterDataTable { - let mut result_table = RasterDataTable::default(); - - for mut image_frame_instance in image_frame.instance_iter() { - let image = image_frame_instance.instance; - // Prepare the image data for 
processing - let image_data = bytemuck::cast_vec(image.data.clone()); - let image_buffer = image::Rgba32FImage::from_raw(image.width, image.height, image_data).expect("Failed to convert internal image format into image-rs data type."); - let dynamic_image: DynamicImage = image_buffer.into(); - - // Run the dehaze algorithm - let dehazed_dynamic_image = dehaze_image(dynamic_image, strength / 100.); - - // Prepare the image data for returning - let buffer = dehazed_dynamic_image.to_rgba32f().into_raw(); - let color_vec = bytemuck::cast_vec(buffer); - let dehazed_image = Image { - width: image.width, - height: image.height, - data: color_vec, - base64_string: None, - }; - - image_frame_instance.instance = Raster::new_cpu(dehazed_image); - image_frame_instance.source_node_id = None; - result_table.push(image_frame_instance); - } - - result_table + image_frame + .instance_iter() + .map(|mut image_frame_instance| { + let image = image_frame_instance.instance; + // Prepare the image data for processing + let image_data = bytemuck::cast_vec(image.data.clone()); + let image_buffer = image::Rgba32FImage::from_raw(image.width, image.height, image_data).expect("Failed to convert internal image format into image-rs data type."); + let dynamic_image: DynamicImage = image_buffer.into(); + + // Run the dehaze algorithm + let dehazed_dynamic_image = dehaze_image(dynamic_image, strength / 100.); + + // Prepare the image data for returning + let buffer = dehazed_dynamic_image.to_rgba32f().into_raw(); + let color_vec = bytemuck::cast_vec(buffer); + let dehazed_image = Image { + width: image.width, + height: image.height, + data: color_vec, + base64_string: None, + }; + + image_frame_instance.instance = Raster::new_cpu(dehazed_image); + image_frame_instance.source_node_id = None; + image_frame_instance + }) + .collect() } // There is no real point in modifying these values because they do not change the final result all that much. diff --git a/node-graph/graster-nodes/src/filter.rs b/node-graph/graster-nodes/src/filter.rs index 3fd1f0686c..b5c4bbf9b4 100644 --- a/node-graph/graster-nodes/src/filter.rs +++ b/node-graph/graster-nodes/src/filter.rs @@ -20,27 +20,26 @@ async fn blur( /// Opt to incorrectly apply the filter with color calculations in gamma space for compatibility with the results from other software. 
gamma: bool, ) -> RasterDataTable { - let mut result_table = RasterDataTable::default(); - - for mut image_instance in image_frame.instance_iter() { - let image = image_instance.instance.clone(); - - // Run blur algorithm - let blurred_image = if radius < 0.1 { - // Minimum blur radius - image.clone() - } else if box_blur { - Raster::new_cpu(box_blur_algorithm(image.into_data(), radius, gamma)) - } else { - Raster::new_cpu(gaussian_blur_algorithm(image.into_data(), radius, gamma)) - }; - - image_instance.instance = blurred_image; - image_instance.source_node_id = None; - result_table.push(image_instance); - } - - result_table + image_frame + .instance_iter() + .map(|mut image_instance| { + let image = image_instance.instance.clone(); + + // Run blur algorithm + let blurred_image = if radius < 0.1 { + // Minimum blur radius + image.clone() + } else if box_blur { + Raster::new_cpu(box_blur_algorithm(image.into_data(), radius, gamma)) + } else { + Raster::new_cpu(gaussian_blur_algorithm(image.into_data(), radius, gamma)) + }; + + image_instance.instance = blurred_image; + image_instance.source_node_id = None; + image_instance + }) + .collect() } // 1D gaussian kernel diff --git a/node-graph/graster-nodes/src/std_nodes.rs b/node-graph/graster-nodes/src/std_nodes.rs index 9b331424d3..3df24c364c 100644 --- a/node-graph/graster-nodes/src/std_nodes.rs +++ b/node-graph/graster-nodes/src/std_nodes.rs @@ -31,69 +31,68 @@ impl From for Error { #[node_macro::node(category("Debug: Raster"))] pub fn sample_image(ctx: impl ExtractFootprint + Clone + Send, image_frame: RasterDataTable) -> RasterDataTable { - let mut result_table = RasterDataTable::default(); - - for mut image_frame_instance in image_frame.instance_iter() { - let image_frame_transform = image_frame_instance.transform; - let image = image_frame_instance.instance; - - // Resize the image using the image crate - let data = bytemuck::cast_vec(image.data.clone()); - - let footprint = ctx.footprint(); - let viewport_bounds = footprint.viewport_bounds_in_local_space(); - let image_bounds = Bbox::from_transform(image_frame_transform).to_axis_aligned_bbox(); - let intersection = viewport_bounds.intersect(&image_bounds); - let image_size = DAffine2::from_scale(DVec2::new(image.width as f64, image.height as f64)); - let size = intersection.size(); - let size_px = image_size.transform_vector2(size).as_uvec2(); - - // If the image would not be visible, add nothing. - if size.x <= 0. || size.y <= 0. 
{ - continue; - } - - let image_buffer = ::image::Rgba32FImage::from_raw(image.width, image.height, data).expect("Failed to convert internal image format into image-rs data type."); - - let dynamic_image: ::image::DynamicImage = image_buffer.into(); - let offset = (intersection.start - image_bounds.start).max(DVec2::ZERO); - let offset_px = image_size.transform_vector2(offset).as_uvec2(); - let cropped = dynamic_image.crop_imm(offset_px.x, offset_px.y, size_px.x, size_px.y); - - let viewport_resolution_x = footprint.transform.transform_vector2(DVec2::X * size.x).length(); - let viewport_resolution_y = footprint.transform.transform_vector2(DVec2::Y * size.y).length(); - let mut new_width = size_px.x; - let mut new_height = size_px.y; - - // Only downscale the image for now - let resized = if new_width < image.width || new_height < image.height { - new_width = viewport_resolution_x as u32; - new_height = viewport_resolution_y as u32; - // TODO: choose filter based on quality requirements - cropped.resize_exact(new_width, new_height, ::image::imageops::Triangle) - } else { - cropped - }; - let buffer = resized.to_rgba32f(); - let buffer = buffer.into_raw(); - let vec = bytemuck::cast_vec(buffer); - let image = Image { - width: new_width, - height: new_height, - data: vec, - base64_string: None, - }; - // we need to adjust the offset if we truncate the offset calculation - - let new_transform = image_frame_transform * DAffine2::from_translation(offset) * DAffine2::from_scale(size); - - image_frame_instance.transform = new_transform; - image_frame_instance.source_node_id = None; - image_frame_instance.instance = Raster::new_cpu(image); - result_table.push(image_frame_instance) - } + image_frame + .instance_iter() + .filter_map(|mut image_frame_instance| { + let image_frame_transform = image_frame_instance.transform; + let image = image_frame_instance.instance; + + // Resize the image using the image crate + let data = bytemuck::cast_vec(image.data.clone()); + + let footprint = ctx.footprint(); + let viewport_bounds = footprint.viewport_bounds_in_local_space(); + let image_bounds = Bbox::from_transform(image_frame_transform).to_axis_aligned_bbox(); + let intersection = viewport_bounds.intersect(&image_bounds); + let image_size = DAffine2::from_scale(DVec2::new(image.width as f64, image.height as f64)); + let size = intersection.size(); + let size_px = image_size.transform_vector2(size).as_uvec2(); + + // If the image would not be visible, add nothing. + if size.x <= 0. || size.y <= 0. 
{ + return None; + } - result_table + let image_buffer = ::image::Rgba32FImage::from_raw(image.width, image.height, data).expect("Failed to convert internal image format into image-rs data type."); + + let dynamic_image: ::image::DynamicImage = image_buffer.into(); + let offset = (intersection.start - image_bounds.start).max(DVec2::ZERO); + let offset_px = image_size.transform_vector2(offset).as_uvec2(); + let cropped = dynamic_image.crop_imm(offset_px.x, offset_px.y, size_px.x, size_px.y); + + let viewport_resolution_x = footprint.transform.transform_vector2(DVec2::X * size.x).length(); + let viewport_resolution_y = footprint.transform.transform_vector2(DVec2::Y * size.y).length(); + let mut new_width = size_px.x; + let mut new_height = size_px.y; + + // Only downscale the image for now + let resized = if new_width < image.width || new_height < image.height { + new_width = viewport_resolution_x as u32; + new_height = viewport_resolution_y as u32; + // TODO: choose filter based on quality requirements + cropped.resize_exact(new_width, new_height, ::image::imageops::Triangle) + } else { + cropped + }; + let buffer = resized.to_rgba32f(); + let buffer = buffer.into_raw(); + let vec = bytemuck::cast_vec(buffer); + let image = Image { + width: new_width, + height: new_height, + data: vec, + base64_string: None, + }; + // we need to adjust the offset if we truncate the offset calculation + + let new_transform = image_frame_transform * DAffine2::from_translation(offset) * DAffine2::from_scale(size); + + image_frame_instance.transform = new_transform; + image_frame_instance.source_node_id = None; + image_frame_instance.instance = Raster::new_cpu(image); + Some(image_frame_instance) + }) + .collect() } #[node_macro::node(category("Raster: Channels"))] @@ -105,84 +104,85 @@ pub fn combine_channels( #[expose] blue: RasterDataTable, #[expose] alpha: RasterDataTable, ) -> RasterDataTable { - let mut result_table = RasterDataTable::default(); - let max_len = red.len().max(green.len()).max(blue.len()).max(alpha.len()); let red = red.instance_iter().map(Some).chain(std::iter::repeat(None)).take(max_len); let green = green.instance_iter().map(Some).chain(std::iter::repeat(None)).take(max_len); let blue = blue.instance_iter().map(Some).chain(std::iter::repeat(None)).take(max_len); let alpha = alpha.instance_iter().map(Some).chain(std::iter::repeat(None)).take(max_len); - for (((red, green), blue), alpha) in red.zip(green).zip(blue).zip(alpha) { - // Turn any default zero-sized image instances into None - let red = red.filter(|i| i.instance.width > 0 && i.instance.height > 0); - let green = green.filter(|i| i.instance.width > 0 && i.instance.height > 0); - let blue = blue.filter(|i| i.instance.width > 0 && i.instance.height > 0); - let alpha = alpha.filter(|i| i.instance.width > 0 && i.instance.height > 0); - - // Get this instance's transform and alpha blending mode from the first non-empty channel - let Some((transform, alpha_blending)) = [&red, &green, &blue, &alpha].iter().find_map(|i| i.as_ref()).map(|i| (i.transform, i.alpha_blending)) else { - continue; - }; - - // Get the common width and height of the channels, which must have equal dimensions - let channel_dimensions = [ - red.as_ref().map(|r| (r.instance.width, r.instance.height)), - green.as_ref().map(|g| (g.instance.width, g.instance.height)), - blue.as_ref().map(|b| (b.instance.width, b.instance.height)), - alpha.as_ref().map(|a| (a.instance.width, a.instance.height)), - ]; - if channel_dimensions.iter().all(Option::is_none) - || 
channel_dimensions - .iter() - .flatten() - .any(|&(x, y)| channel_dimensions.iter().flatten().any(|&(other_x, other_y)| x != other_x || y != other_y)) - { - continue; - } - let Some(&(width, height)) = channel_dimensions.iter().flatten().next() else { continue }; - - // Create a new image for this instance output - let mut image = Image::new(width, height, Color::TRANSPARENT); - - // Iterate over all pixels in the image and set the color channels - for y in 0..image.height() { - for x in 0..image.width() { - let image_pixel = image.get_pixel_mut(x, y).unwrap(); - - if let Some(r) = red.as_ref().and_then(|r| r.instance.get_pixel(x, y)) { - image_pixel.set_red(r.l().cast_linear_channel()); - } else { - image_pixel.set_red(Channel::from_linear(0.)); - } - if let Some(g) = green.as_ref().and_then(|g| g.instance.get_pixel(x, y)) { - image_pixel.set_green(g.l().cast_linear_channel()); - } else { - image_pixel.set_green(Channel::from_linear(0.)); - } - if let Some(b) = blue.as_ref().and_then(|b| b.instance.get_pixel(x, y)) { - image_pixel.set_blue(b.l().cast_linear_channel()); - } else { - image_pixel.set_blue(Channel::from_linear(0.)); - } - if let Some(a) = alpha.as_ref().and_then(|a| a.instance.get_pixel(x, y)) { - image_pixel.set_alpha(a.l().cast_linear_channel()); - } else { - image_pixel.set_alpha(Channel::from_linear(1.)); + red.zip(green) + .zip(blue) + .zip(alpha) + .filter_map(|(((red, green), blue), alpha)| { + // Turn any default zero-sized image instances into None + let red = red.filter(|i| i.instance.width > 0 && i.instance.height > 0); + let green = green.filter(|i| i.instance.width > 0 && i.instance.height > 0); + let blue = blue.filter(|i| i.instance.width > 0 && i.instance.height > 0); + let alpha = alpha.filter(|i| i.instance.width > 0 && i.instance.height > 0); + + // Get this instance's transform and alpha blending mode from the first non-empty channel + let Some((transform, alpha_blending)) = [&red, &green, &blue, &alpha].iter().find_map(|i| i.as_ref()).map(|i| (i.transform, i.alpha_blending)) else { + return None; + }; + + // Get the common width and height of the channels, which must have equal dimensions + let channel_dimensions = [ + red.as_ref().map(|r| (r.instance.width, r.instance.height)), + green.as_ref().map(|g| (g.instance.width, g.instance.height)), + blue.as_ref().map(|b| (b.instance.width, b.instance.height)), + alpha.as_ref().map(|a| (a.instance.width, a.instance.height)), + ]; + if channel_dimensions.iter().all(Option::is_none) + || channel_dimensions + .iter() + .flatten() + .any(|&(x, y)| channel_dimensions.iter().flatten().any(|&(other_x, other_y)| x != other_x || y != other_y)) + { + return None; + } + let Some(&(width, height)) = channel_dimensions.iter().flatten().next() else { + return None; + }; + + // Create a new image for this instance output + let mut image = Image::new(width, height, Color::TRANSPARENT); + + // Iterate over all pixels in the image and set the color channels + for y in 0..image.height() { + for x in 0..image.width() { + let image_pixel = image.get_pixel_mut(x, y).unwrap(); + + if let Some(r) = red.as_ref().and_then(|r| r.instance.get_pixel(x, y)) { + image_pixel.set_red(r.l().cast_linear_channel()); + } else { + image_pixel.set_red(Channel::from_linear(0.)); + } + if let Some(g) = green.as_ref().and_then(|g| g.instance.get_pixel(x, y)) { + image_pixel.set_green(g.l().cast_linear_channel()); + } else { + image_pixel.set_green(Channel::from_linear(0.)); + } + if let Some(b) = blue.as_ref().and_then(|b| b.instance.get_pixel(x, y)) 
{ + image_pixel.set_blue(b.l().cast_linear_channel()); + } else { + image_pixel.set_blue(Channel::from_linear(0.)); + } + if let Some(a) = alpha.as_ref().and_then(|a| a.instance.get_pixel(x, y)) { + image_pixel.set_alpha(a.l().cast_linear_channel()); + } else { + image_pixel.set_alpha(Channel::from_linear(1.)); + } } } - } - - // Add this instance to the result table - result_table.push(Instance { - instance: Raster::new_cpu(image), - transform, - alpha_blending, - source_node_id: None, - }); - } - result_table + Some(Instance { + instance: Raster::new_cpu(image), + transform, + alpha_blending, + source_node_id: None, + }) + }) + .collect() } #[node_macro::node(category("Raster"))] @@ -201,91 +201,85 @@ pub fn mask( }; let stencil_size = DVec2::new(stencil_instance.instance.width as f64, stencil_instance.instance.height as f64); - let mut result_table = RasterDataTable::default(); - - for mut image_instance in image.instance_iter() { - let image_size = DVec2::new(image_instance.instance.width as f64, image_instance.instance.height as f64); - let mask_size = stencil_instance.transform.decompose_scale(); - - if mask_size == DVec2::ZERO { - continue; - } + image + .instance_iter() + .filter_map(|mut image_instance| { + let image_size = DVec2::new(image_instance.instance.width as f64, image_instance.instance.height as f64); + let mask_size = stencil_instance.transform.decompose_scale(); - // Transforms a point from the background image to the foreground image - let bg_to_fg = image_instance.transform * DAffine2::from_scale(1. / image_size); - let stencil_transform_inverse = stencil_instance.transform.inverse(); - - for y in 0..image_instance.instance.height { - for x in 0..image_instance.instance.width { - let image_point = DVec2::new(x as f64, y as f64); - let mask_point = bg_to_fg.transform_point2(image_point); - let local_mask_point = stencil_transform_inverse.transform_point2(mask_point); - let mask_point = stencil_instance.transform.transform_point2(local_mask_point.clamp(DVec2::ZERO, DVec2::ONE)); - let mask_point = (DAffine2::from_scale(stencil_size) * stencil_instance.transform.inverse()).transform_point2(mask_point); - - let image_pixel = image_instance.instance.data_mut().get_pixel_mut(x, y).unwrap(); - let mask_pixel = stencil_instance.instance.sample(mask_point); - *image_pixel = image_pixel.multiplied_alpha(mask_pixel.l().cast_linear_channel()); + if mask_size == DVec2::ZERO { + return None; } - } - result_table.push(image_instance); - } + // Transforms a point from the background image to the foreground image + let bg_to_fg = image_instance.transform * DAffine2::from_scale(1. 
/ image_size); + let stencil_transform_inverse = stencil_instance.transform.inverse(); + + for y in 0..image_instance.instance.height { + for x in 0..image_instance.instance.width { + let image_point = DVec2::new(x as f64, y as f64); + let mask_point = bg_to_fg.transform_point2(image_point); + let local_mask_point = stencil_transform_inverse.transform_point2(mask_point); + let mask_point = stencil_instance.transform.transform_point2(local_mask_point.clamp(DVec2::ZERO, DVec2::ONE)); + let mask_point = (DAffine2::from_scale(stencil_size) * stencil_instance.transform.inverse()).transform_point2(mask_point); + + let image_pixel = image_instance.instance.data_mut().get_pixel_mut(x, y).unwrap(); + let mask_pixel = stencil_instance.instance.sample(mask_point); + *image_pixel = image_pixel.multiplied_alpha(mask_pixel.l().cast_linear_channel()); + } + } - result_table + Some(image_instance) + }) + .collect() } #[node_macro::node(category(""))] pub fn extend_image_to_bounds(_: impl Ctx, image: RasterDataTable, bounds: DAffine2) -> RasterDataTable { - let mut result_table = RasterDataTable::default(); - - for mut image_instance in image.instance_iter() { - let image_aabb = Bbox::unit().affine_transform(image_instance.transform).to_axis_aligned_bbox(); - let bounds_aabb = Bbox::unit().affine_transform(bounds.transform()).to_axis_aligned_bbox(); - if image_aabb.contains(bounds_aabb.start) && image_aabb.contains(bounds_aabb.end) { - result_table.push(image_instance); - continue; - } - - let image_data = &image_instance.instance.data; - let (image_width, image_height) = (image_instance.instance.width, image_instance.instance.height); - if image_width == 0 || image_height == 0 { - for image_instance in empty_image((), bounds, Color::TRANSPARENT).instance_iter() { - result_table.push(image_instance); + image + .instance_iter() + .map(|mut image_instance| { + let image_aabb = Bbox::unit().affine_transform(image_instance.transform).to_axis_aligned_bbox(); + let bounds_aabb = Bbox::unit().affine_transform(bounds.transform()).to_axis_aligned_bbox(); + if image_aabb.contains(bounds_aabb.start) && image_aabb.contains(bounds_aabb.end) { + return image_instance; } - continue; - } - let orig_image_scale = DVec2::new(image_width as f64, image_height as f64); - let layer_to_image_space = DAffine2::from_scale(orig_image_scale) * image_instance.transform.inverse(); - let bounds_in_image_space = Bbox::unit().affine_transform(layer_to_image_space * bounds).to_axis_aligned_bbox(); - - let new_start = bounds_in_image_space.start.floor().min(DVec2::ZERO); - let new_end = bounds_in_image_space.end.ceil().max(orig_image_scale); - let new_scale = new_end - new_start; - - // Copy over original image into enlarged image. 
- let mut new_image = Image::new(new_scale.x as u32, new_scale.y as u32, Color::TRANSPARENT); - let offset_in_new_image = (-new_start).as_uvec2(); - for y in 0..image_height { - let old_start = y * image_width; - let new_start = (y + offset_in_new_image.y) * new_image.width + offset_in_new_image.x; - let old_row = &image_data[old_start as usize..(old_start + image_width) as usize]; - let new_row = &mut new_image.data[new_start as usize..(new_start + image_width) as usize]; - new_row.copy_from_slice(old_row); - } + let image_data = &image_instance.instance.data; + let (image_width, image_height) = (image_instance.instance.width, image_instance.instance.height); + if image_width == 0 || image_height == 0 { + return empty_image((), bounds, Color::TRANSPARENT).instance_iter().next().unwrap(); + } - // Compute new transform. - // let layer_to_new_texture_space = (DAffine2::from_scale(1. / new_scale) * DAffine2::from_translation(new_start) * layer_to_image_space).inverse(); - let new_texture_to_layer_space = image_instance.transform * DAffine2::from_scale(1. / orig_image_scale) * DAffine2::from_translation(new_start) * DAffine2::from_scale(new_scale); + let orig_image_scale = DVec2::new(image_width as f64, image_height as f64); + let layer_to_image_space = DAffine2::from_scale(orig_image_scale) * image_instance.transform.inverse(); + let bounds_in_image_space = Bbox::unit().affine_transform(layer_to_image_space * bounds).to_axis_aligned_bbox(); + + let new_start = bounds_in_image_space.start.floor().min(DVec2::ZERO); + let new_end = bounds_in_image_space.end.ceil().max(orig_image_scale); + let new_scale = new_end - new_start; + + // Copy over original image into enlarged image. + let mut new_image = Image::new(new_scale.x as u32, new_scale.y as u32, Color::TRANSPARENT); + let offset_in_new_image = (-new_start).as_uvec2(); + for y in 0..image_height { + let old_start = y * image_width; + let new_start = (y + offset_in_new_image.y) * new_image.width + offset_in_new_image.x; + let old_row = &image_data[old_start as usize..(old_start + image_width) as usize]; + let new_row = &mut new_image.data[new_start as usize..(new_start + image_width) as usize]; + new_row.copy_from_slice(old_row); + } - image_instance.instance = Raster::new_cpu(new_image); - image_instance.transform = new_texture_to_layer_space; - image_instance.source_node_id = None; - result_table.push(image_instance); - } + // Compute new transform. + // let layer_to_new_texture_space = (DAffine2::from_scale(1. / new_scale) * DAffine2::from_translation(new_start) * layer_to_image_space).inverse(); + let new_texture_to_layer_space = image_instance.transform * DAffine2::from_scale(1. 
/ orig_image_scale) * DAffine2::from_translation(new_start) * DAffine2::from_scale(new_scale); - result_table + image_instance.instance = Raster::new_cpu(new_image); + image_instance.transform = new_texture_to_layer_space; + image_instance.source_node_id = None; + image_instance + }) + .collect() } #[node_macro::node(category("Debug: Raster"))] @@ -392,14 +386,11 @@ pub fn noise_pattern( } } - let mut result = RasterDataTable::default(); - result.push(Instance { + return RasterDataTable::new_instance(Instance { instance: Raster::new_cpu(image), transform: DAffine2::from_translation(offset) * DAffine2::from_scale(size), ..Default::default() }); - - return result; } }; noise.set_noise_type(Some(noise_type)); @@ -457,14 +448,11 @@ pub fn noise_pattern( } } - let mut result = RasterDataTable::default(); - result.push(Instance { + RasterDataTable::new_instance(Instance { instance: Raster::new_cpu(image), transform: DAffine2::from_translation(offset) * DAffine2::from_scale(size), ..Default::default() - }); - - result + }) } #[node_macro::node(category("Raster: Pattern"))] @@ -502,20 +490,16 @@ pub fn mandelbrot(ctx: impl ExtractFootprint + Send) -> RasterDataTable { } } - let image = Image { - width, - height, - data, - ..Default::default() - }; - let mut result = RasterDataTable::default(); - result.push(Instance { - instance: Raster::new_cpu(image), + RasterDataTable::new_instance(Instance { + instance: Raster::new_cpu(Image { + width, + height, + data, + ..Default::default() + }), transform: DAffine2::from_translation(offset) * DAffine2::from_scale(size), ..Default::default() - }); - - result + }) } #[inline(always)] diff --git a/node-graph/gstd/src/wasm_application_io.rs b/node-graph/gstd/src/wasm_application_io.rs index 4ab9e5093b..a40309f88e 100644 --- a/node-graph/gstd/src/wasm_application_io.rs +++ b/node-graph/gstd/src/wasm_application_io.rs @@ -292,15 +292,12 @@ where let rasterized = context.get_image_data(0., 0., resolution.x as f64, resolution.y as f64).unwrap(); - let mut result = RasterDataTable::default(); let image = Image::from_image_data(&rasterized.data().0, resolution.x as u32, resolution.y as u32); - result.push(Instance { + RasterDataTable::new_instance(Instance { instance: Raster::new_cpu(image), transform: footprint.transform, ..Default::default() - }); - - result + }) } #[node_macro::node(category(""))] diff --git a/node-graph/gsvg-renderer/src/renderer.rs b/node-graph/gsvg-renderer/src/renderer.rs index aba185c951..dd3a1ebb08 100644 --- a/node-graph/gsvg-renderer/src/renderer.rs +++ b/node-graph/gsvg-renderer/src/renderer.rs @@ -38,10 +38,10 @@ impl MaskType { } fn write_to_defs(self, svg_defs: &mut String, uuid: u64, svg_string: String) { - let id = format!("mask-{}", uuid); + let id = format!("mask-{uuid}"); match self { - Self::Clip => write!(svg_defs, r##"{}"##, svg_string).unwrap(), - Self::Mask => write!(svg_defs, r##"{}"##, svg_string).unwrap(), + Self::Clip => write!(svg_defs, r##"{svg_string}"##).unwrap(), + Self::Mask => write!(svg_defs, r##"{svg_string}"##).unwrap(), } } } @@ -89,9 +89,9 @@ impl SvgRender { .unwrap_or_default(); let matrix = format_transform_matrix(transform); - let transform = if matrix.is_empty() { String::new() } else { format!(r#" transform="{}""#, matrix) }; + let transform = if matrix.is_empty() { String::new() } else { format!(r#" transform="{matrix}""#) }; - let svg_header = format!(r#"{defs}"#, view_box); + let svg_header = format!(r#"{defs}"#); self.svg.insert(0, svg_header.into()); self.svg.push("".into()); } @@ -267,7 +267,7 @@ 
impl GraphicElementRendered for GraphicGroupTable { mask_state = None; } - let id = format!("mask-{}", uuid); + let id = format!("mask-{uuid}"); let selector = format!("url(#{id})"); attributes.push(mask_type.to_attribute(), selector); @@ -444,18 +444,18 @@ impl GraphicElementRendered for VectorDataTable { let can_use_order = !instance.instance.style.fill().is_none() && mask_type == MaskType::Mask; if !can_use_order { let id = format!("alignment-{}", generate_uuid()); - let mut vector_row = VectorDataTable::default(); - let mut fill_instance = instance.instance.clone(); + let mut fill_instance = instance.instance.clone(); fill_instance.style.clear_stroke(); fill_instance.style.set_fill(Fill::solid(Color::BLACK)); - vector_row.push(Instance { + let vector_row = VectorDataTable::new_instance(Instance { instance: fill_instance, alpha_blending: *instance.alpha_blending, transform: *instance.transform, source_node_id: None, }); + push_id = Some((id, mask_type, vector_row)); } } @@ -477,7 +477,7 @@ impl GraphicElementRendered for VectorDataTable { let (x, y) = quad.top_left().into(); let (width, height) = (quad.bottom_right() - quad.top_left()).into(); write!(defs, r##"{}"##, svg.svg_defs).unwrap(); - let rect = format!(r##""##, x, y); + let rect = format!(r##""##); match mask_type { MaskType::Clip => write!(defs, r##"{}"##, svg.svg.to_svg_string()).unwrap(), MaskType::Mask => write!(defs, r##"{}{}"##, rect, svg.svg.to_svg_string()).unwrap(), @@ -564,13 +564,11 @@ impl GraphicElementRendered for VectorDataTable { .stroke() .is_some_and(|stroke| stroke.align == StrokeAlign::Outside && !instance.instance.style.fill().is_none()); if can_draw_aligned_stroke && !reorder_for_outside { - let mut vector_data = VectorDataTable::default(); - let mut fill_instance = instance.instance.clone(); fill_instance.style.clear_stroke(); fill_instance.style.set_fill(Fill::solid(Color::BLACK)); - vector_data.push(Instance { + let vector_data = VectorDataTable::new_instance(Instance { instance: fill_instance, alpha_blending: *instance.alpha_blending, transform: *instance.transform, @@ -639,7 +637,11 @@ impl GraphicElementRendered for VectorDataTable { let bounds = instance.instance.nonzero_bounding_box(); let bound_transform = DAffine2::from_scale_angle_translation(bounds[1] - bounds[0], 0., bounds[0]); - let inverse_parent_transform = (parent_transform.matrix2.determinant() != 0.).then(|| parent_transform.inverse()).unwrap_or_default(); + let inverse_parent_transform = if parent_transform.matrix2.determinant() != 0. { + parent_transform.inverse() + } else { + Default::default() + }; let mod_points = inverse_parent_transform * multiplied_transform * bound_transform; let start = mod_points.transform_point2(gradient.start); @@ -666,7 +668,11 @@ impl GraphicElementRendered for VectorDataTable { }); // Vello does `element_transform * brush_transform` internally. We don't want element_transform to have any impact so we need to left multiply by the inverse. // This makes the final internal brush transform equal to `parent_transform`, allowing you to stretch a gradient by transforming the parent folder. - let inverse_element_transform = (element_transform.matrix2.determinant() != 0.).then(|| element_transform.inverse()).unwrap_or_default(); + let inverse_element_transform = if element_transform.matrix2.determinant() != 0. 
{ + element_transform.inverse() + } else { + Default::default() + }; let brush_transform = kurbo::Affine::new((inverse_element_transform * parent_transform).to_cols_array()); scene.fill(peniko::Fill::NonZero, kurbo::Affine::new(element_transform.to_cols_array()), &fill, Some(brush_transform), &path); } diff --git a/node-graph/node-macro/Cargo.toml b/node-graph/node-macro/Cargo.toml index b63ce4927d..ebc1037d92 100644 --- a/node-graph/node-macro/Cargo.toml +++ b/node-graph/node-macro/Cargo.toml @@ -2,7 +2,7 @@ name = "node-macro" publish = false version = "0.0.0" -rust-version = "1.85" +rust-version = "1.88" authors = ["Graphite Authors "] edition = "2024" readme = "../../README.md" @@ -26,4 +26,3 @@ proc-macro-error2 = "2" [dev-dependencies] graphene-core = { workspace = true } - diff --git a/proc-macros/Cargo.toml b/proc-macros/Cargo.toml index 51417c6ff7..a19e5b598d 100644 --- a/proc-macros/Cargo.toml +++ b/proc-macros/Cargo.toml @@ -2,7 +2,7 @@ name = "graphite-proc-macros" publish = false version = "0.0.0" -rust-version = "1.85" +rust-version = "1.88" authors = ["Graphite Authors "] edition = "2024" readme = "../README.md"
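
Reviewer note (not part of the patch): a minimal, self-contained sketch of the refactor pattern applied throughout the hunks above, where an imperative `result_table.push(...)` loop over `instance_iter()` becomes a `map(...).collect()` chain. The `Instance` and `Table` types below are simplified stand-ins rather than the real Graphene tables, and the `FromIterator` impl shown is only an assumption about what makes the `.collect()` calls in these hunks compile.

// Sketch only: toy `Instance`/`Table` types standing in for the Graphene tables refactored above.
#[derive(Clone, Debug, PartialEq)]
struct Instance {
	value: f64,
}

#[derive(Default, Debug, PartialEq)]
struct Table {
	instances: Vec<Instance>,
}

impl Table {
	fn push(&mut self, instance: Instance) {
		self.instances.push(instance);
	}

	fn instance_iter(self) -> impl Iterator<Item = Instance> {
		self.instances.into_iter()
	}
}

// Assumed shape of the `FromIterator` impl that lets the node bodies end in `.collect()`.
impl FromIterator<Instance> for Table {
	fn from_iter<I: IntoIterator<Item = Instance>>(iter: I) -> Self {
		let mut table = Table::default();
		for instance in iter {
			table.push(instance);
		}
		table
	}
}

// Before: build the result table imperatively, as the removed lines above did.
fn double_push(source: Table) -> Table {
	let mut result = Table::default();
	for mut instance in source.instance_iter() {
		instance.value *= 2.;
		result.push(instance);
	}
	result
}

// After: the same transformation expressed as an iterator pipeline, as the added lines do.
fn double_collect(source: Table) -> Table {
	source
		.instance_iter()
		.map(|mut instance| {
			instance.value *= 2.;
			instance
		})
		.collect()
}

fn main() {
	let a = Table {
		instances: vec![Instance { value: 1. }, Instance { value: 2.5 }],
	};
	let b = Table { instances: a.instances.clone() };
	assert_eq!(double_push(a), double_collect(b));
}

The `filter_map(...)` variants above follow the same shape, with `return None` taking the place of the old `continue` statements when an instance is skipped.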
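A second hedged sketch, this time of the singular-transform guard that these hunks rewrite from `.then(|| ...).unwrap_or_default()` into an explicit `if`/`else`. It assumes `glam` is available as a dependency (the hunks above already use `DAffine2` and `DVec2`); the fallback is whatever `Default::default()` yields for `DAffine2`, exactly as in the original `.unwrap_or_default()` form.

// Sketch only: guard the affine inverse behind a determinant check, as the hunks above do.
use glam::{DAffine2, DVec2};

fn safe_inverse(transform: DAffine2) -> DAffine2 {
	// Invert only when the 2x2 linear part is non-singular; otherwise fall back to the default affine.
	if transform.matrix2.determinant() != 0. {
		transform.inverse()
	} else {
		DAffine2::default()
	}
}

fn main() {
	let invertible = DAffine2::from_scale(DVec2::new(2., 3.));
	assert_eq!(safe_inverse(invertible), invertible.inverse());

	// A zero-scale transform has determinant 0, so the guard returns the default instead of a degenerate inverse.
	let singular = DAffine2::from_scale(DVec2::ZERO);
	assert_eq!(safe_inverse(singular), DAffine2::default());
}

The `if`/`else` form reads more directly than the `.then(...).unwrap_or_default()` chain it replaces while producing the same value.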
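Finally, a tiny sketch of the inline format-argument cleanup made in `node-graph/gsvg-renderer/src/renderer.rs` above; `uuid` here is a hypothetical local, and the only point is that a captured identifier in the format string produces the same output as the positional `{}` form it replaces.

fn main() {
	let uuid: u64 = 42;
	let positional = format!("mask-{}", uuid);
	// Inline format arguments capture `uuid` directly from the surrounding scope.
	let inline = format!("mask-{uuid}");
	assert_eq!(positional, inline);
	println!("{inline}");
}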