Added footsteps, new tree, various other tweaks

derek
2024-12-05 11:47:34 -06:00
parent 816ae85938
commit 023879ea9f
389 changed files with 20484 additions and 234 deletions


@@ -0,0 +1,50 @@
@tool
extends Resource
# Used by the Domain class
# TODO: This could be replaced by a built-in AABB
var size: Vector3
var center: Vector3
var min: Vector3
var max: Vector3
var _points := 0
func clear() -> void:
size = Vector3.ZERO
center = Vector3.ZERO
min = Vector3.ZERO
max = Vector3.ZERO
_points = 0
func feed(point: Vector3) -> void:
if _points == 0:
min = point
max = point
min = _minv(min, point)
max = _maxv(max, point)
_points += 1
# Call this after you've called feed() with all the points in your data set
func compute_bounds() -> void:
if _points == 0:
return
size = max - min
center = min + (size / 2.0)
# Returns the component-wise minimum of the two input vectors
func _minv(v1: Vector3, v2: Vector3) -> Vector3:
return Vector3(min(v1.x, v2.x), min(v1.y, v2.y), min(v1.z, v2.z))
# Returns the component-wise maximum of the two input vectors
func _maxv(v1: Vector3, v2: Vector3) -> Vector3:
return Vector3(max(v1.x, v2.x), max(v1.y, v2.y), max(v1.z, v2.z))
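# Illustrative usage sketch from a hypothetical consumer script (not part of the
# original file): feed every point first, then call compute_bounds() once.
# The preload path below is an assumption.
const Bounds := preload("res://addons/proton_scatter/src/common/bounds.gd")

func print_point_cloud_bounds(points: Array[Vector3]) -> void:
	var bounds := Bounds.new()
	bounds.clear()
	for p in points:
		bounds.feed(p)
	bounds.compute_bounds()
	print("Center: ", bounds.center, " - Size: ", bounds.size)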


@@ -0,0 +1,27 @@
@tool
class_name ProtonScatterCacheResource
extends Resource
@export var data = {}
func clear() -> void:
data.clear()
func store(node_path: String, transforms: Array[Transform3D]) -> void:
data[node_path] = transforms
func erase(node_path: String) -> void:
data.erase(node_path)
func get_transforms(node_path: String) -> Array[Transform3D]:
var res: Array[Transform3D]
if node_path in data:
res.assign(data[node_path])
return res
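# Illustrative usage sketch from a hypothetical consumer script (not part of the
# original file). Node paths and counts below are made up.
func cache_example() -> void:
	var cache := ProtonScatterCacheResource.new()
	var transforms: Array[Transform3D] = []
	transforms.resize(4) # Four identity transforms
	cache.store("Rocks", transforms)
	print(cache.get_transforms("Rocks").size()) # 4
	print(cache.get_transforms("Trees").size()) # 0, unknown paths return an empty array
	cache.erase("Rocks")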


@@ -0,0 +1,312 @@
@tool
extends RefCounted
# A domain is the complete area where transforms can (and can't) be placed.
# A Scatter node has a single domain; a domain has one or more shape nodes.
#
# It's the combination of every shape defined under a Scatter node, grouped in
# a single class that exposes utility functions (check whether a point is inside
# or along the surface, etc.).
#
# An instance of this class is passed to the modifiers during a rebuild.
const ProtonScatter := preload("../scatter.gd")
const ProtonScatterShape := preload("../scatter_shape.gd")
const BaseShape := preload("../shapes/base_shape.gd")
const Bounds := preload("../common/bounds.gd")
class DomainShapeInfo:
var node: Node3D
var shape: BaseShape
func is_point_inside(point: Vector3, local: bool) -> bool:
var t: Transform3D
if is_instance_valid(node):
t = node.get_transform() if local else node.get_global_transform()
return shape.is_point_inside(point, t)
else:
return false
func get_corners_global() -> Array:
return shape.get_corners_global(node.get_global_transform())
# A polygon made of one outer boundary and one or more holes (inner polygons)
class ComplexPolygon:
var inner: Array[PackedVector2Array] = []
var outer: PackedVector2Array
func add(polygon: PackedVector2Array) -> void:
if polygon.is_empty(): return
if Geometry2D.is_polygon_clockwise(polygon):
inner.push_back(polygon)
else:
if not outer.is_empty():
print_debug("ProtonScatter error: Replacing polygon's existing outer boundary. This should not happen, please report.")
outer = polygon
func add_array(array: Array, reverse := false) -> void:
for p in array:
if reverse:
p.reverse()
add(p)
func get_all() -> Array[PackedVector2Array]:
var res = inner.duplicate()
res.push_back(outer)
return res
func _to_string() -> String:
var res = "o: " + var_to_str(outer.size()) + ", i: ["
for i in inner:
res += var_to_str(i.size()) + ", "
res += "]"
return res
var root: ProtonScatter
var positive_shapes: Array[DomainShapeInfo]
var negative_shapes: Array[DomainShapeInfo]
var bounds_global: Bounds = Bounds.new()
var bounds_local: Bounds = Bounds.new()
var edges: Array[Curve3D] = []
func is_empty() -> bool:
return positive_shapes.is_empty()
# If a point is in an exclusion shape, returns false
# If a point is in an inclusion shape (but not in an exclusion one), returns true
# If a point is in neither, returns false
func is_point_inside(point: Vector3, local := true) -> bool:
for s in negative_shapes:
if s.is_point_inside(point, local):
return false
for s in positive_shapes:
if s.is_point_inside(point, local):
return true
return false
# If a point is inside an exclusion shape, returns true
# Returns false in every other case
func is_point_excluded(point: Vector3, local := true) -> bool:
for s in negative_shapes:
if s.is_point_inside(point, local):
return true
return false
# Recursively find all ScatterShape nodes under the provided root. In case of
# nested Scatter nodes, shapes under these other Scatter nodes will be ignored
func discover_shapes(root_node: Node3D) -> void:
root = root_node
positive_shapes.clear()
negative_shapes.clear()
if not is_instance_valid(root):
return
for c in root.get_children():
_discover_shapes_recursive(c)
compute_bounds()
compute_edges()
func compute_bounds() -> void:
bounds_global.clear()
bounds_local.clear()
if not is_instance_valid(root):
return
var gt: Transform3D = root.get_global_transform().affine_inverse()
for info in positive_shapes:
for point in info.get_corners_global():
bounds_global.feed(point)
bounds_local.feed(gt * point)
bounds_global.compute_bounds()
bounds_local.compute_bounds()
func compute_edges() -> void:
edges.clear()
if not is_instance_valid(root):
return
var source_polygons: Array[ComplexPolygon] = []
## Retrieve all polygons
for info in positive_shapes:
# Store all closed polygons in a specific array
var polygon := ComplexPolygon.new()
polygon.add_array(info.shape.get_closed_edges(info.node.transform))
# Polygons with holes must be merged together first
if not polygon.inner.is_empty():
source_polygons.push_back(polygon)
else:
source_polygons.push_front(polygon)
# Store open edges directly since they are already Curve3D and we
# don't apply boolean operations to them.
var open_edges = info.shape.get_open_edges(info.node.transform)
edges.append_array(open_edges)
if source_polygons.is_empty():
return
## Merge all closed polygons together
var merged_polygons: Array[ComplexPolygon] = []
while not source_polygons.is_empty():
var merged := false
var p1: ComplexPolygon = source_polygons.pop_back()
var max_steps: int = source_polygons.size()
var i = 0
# Test p1 against every other polygon from source_polygons until a
# successful merge. If no merge happened, put p1 in the final array.
while i < max_steps and not merged:
i += 1
# Get the next polygon in the list
var p2: ComplexPolygon = source_polygons.pop_back()
# If the outer boundary of either of the two polygons is completely
# enclosed in one of the other polygon's holes, we don't try to
# merge them and go to the next iteration.
var full_overlap = false
for ip1 in p1.inner:
var res = Geometry2D.clip_polygons(p2.outer, ip1)
if res.is_empty():
full_overlap = true
break
for ip2 in p2.inner:
var res = Geometry2D.clip_polygons(p1.outer, ip2)
if res.is_empty():
full_overlap = true
break
if full_overlap:
source_polygons.push_front(p2)
continue
# Try to merge the two polygons p1 and p2
var res = Geometry2D.merge_polygons(p1.outer, p2.outer)
var outer_polygons := 0
for p in res:
if not Geometry2D.is_polygon_clockwise(p):
outer_polygons += 1
# If the merge generated a new polygon, process the holes data from
# the two original polygons and store it in the new polygon.
# p1 and p2 are then discarded and replaced by the new polygon.
if outer_polygons == 1:
var new_polygon = ComplexPolygon.new()
new_polygon.add_array(res)
# Process the holes data from p1 and p2
for ip1 in p1.inner:
for ip2 in p2.inner:
new_polygon.add_array(Geometry2D.intersect_polygons(ip1, ip2), true)
new_polygon.add_array(Geometry2D.clip_polygons(ip2, p1.outer), true)
new_polygon.add_array(Geometry2D.clip_polygons(ip1, p2.outer), true)
source_polygons.push_back(new_polygon)
merged = true
# If the polygons don't overlap, return p2 to the pool to be tested
# against other polygons
else:
source_polygons.push_front(p2)
# If p1 is not overlapping any other polygon, add it to the final list
if not merged:
merged_polygons.push_back(p1)
## For each polygon from the previous step, create a corresponding Curve3D
for cp in merged_polygons:
for polygon in cp.get_all():
if polygon.size() < 2: # Ignore polygons too small to form a loop
continue
var curve := Curve3D.new()
for point in polygon:
curve.add_point(Vector3(point.x, 0.0, point.y))
# Close the loop if the last vertex is missing (happens randomly)
var first_point := polygon[0]
var last_point := polygon[-1]
if first_point != last_point:
curve.add_point(Vector3(first_point.x, 0.0, first_point.y))
edges.push_back(curve)
func get_root() -> ProtonScatter:
return root
func get_global_transform() -> Transform3D:
return root.get_global_transform()
func get_local_transform() -> Transform3D:
return root.get_transform()
func get_edges() -> Array[Curve3D]:
if edges.is_empty():
compute_edges()
return edges
func get_copy():
var copy = get_script().new()
copy.root = root
copy.bounds_global = bounds_global
copy.bounds_local = bounds_local
for s in positive_shapes:
var s_copy = DomainShapeInfo.new()
s_copy.node = s.node
s_copy.shape = s.shape.get_copy()
copy.positive_shapes.push_back(s_copy)
for s in negative_shapes:
var s_copy = DomainShapeInfo.new()
s_copy.node = s.node
s_copy.shape = s.shape.get_copy()
copy.negative_shapes.push_back(s_copy)
return copy
func _discover_shapes_recursive(node: Node) -> void:
if node is ProtonScatter: # Ignore shapes under nested Scatter nodes
return
if node is ProtonScatterShape and node.shape != null:
var info := DomainShapeInfo.new()
info.node = node
info.shape = node.shape
if node.negative:
negative_shapes.push_back(info)
else:
positive_shapes.push_back(info)
for c in node.get_children():
_discover_shapes_recursive(c)
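# Illustrative usage sketch from a hypothetical consumer script (not part of the
# original file). The preload path is an assumption; scatter_node must be a
# ProtonScatter node.
const Domain := preload("res://addons/proton_scatter/src/common/domain.gd")

func domain_example(scatter_node) -> void:
	var domain := Domain.new()
	domain.discover_shapes(scatter_node) # Also computes bounds and edges
	var point := Vector3(1.0, 0.0, 2.0)
	if domain.is_point_excluded(point):
		print("Point is inside an exclusion shape")
	elif domain.is_point_inside(point):
		print("Point is inside the domain")
	print("Local bounds size: ", domain.bounds_local.size)
	print("Number of edge curves: ", domain.get_edges().size())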


@@ -0,0 +1,72 @@
extends RefCounted
# Utility class that mimics the Input class behavior
#
# This is only useful when using actions from the Input class isn't possible,
# like in _unhandled_input or forward_3d_gui_input for example, where you don't
# have a native way to detect if a key was just pressed or released.
#
# How to use:
# Call the feed() method first with the latest event you received, then call
# either of the is_key_* functions.
#
# If you don't call feed() on the same frame before calling either of them,
# the behavior is undefined.
var _actions := {}
func feed(event: InputEvent) -> void:
var key
if event is InputEventMouseButton:
key = event.button_index
elif event is InputEventKey:
key = event.keycode
else:
_cleanup_states()
return
if not key in _actions:
_actions[key] = {
pressed = event.pressed,
just_released = not event.pressed,
just_pressed = event.pressed,
}
return
var pressed = _actions[key].pressed
if pressed and not event.pressed:
_actions[key].just_released = true
_actions[key].just_pressed = false
if not pressed and event.pressed:
_actions[key].just_pressed = true
_actions[key].just_released = false
if pressed and event.pressed:
_actions[key].just_pressed = false
_actions[key].just_released = false
_actions[key].pressed = event.pressed
func _cleanup_states() -> void:
for key in _actions:
_actions[key].just_released = false
_actions[key].just_pressed = false
func is_key_just_pressed(key) -> bool:
if key in _actions:
return _actions[key].just_pressed
return false
func is_key_just_released(key) -> bool:
if key in _actions:
return _actions[key].just_released
return false
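# Illustrative usage sketch from a hypothetical editor plugin script (not part of
# the original file). The preload path is an assumption.
const EventHelper := preload("res://addons/proton_scatter/src/common/event_helper.gd")

var _event_helper := EventHelper.new()

func _unhandled_input(event: InputEvent) -> void:
	_event_helper.feed(event) # Must be called before any is_key_* query this frame
	if _event_helper.is_key_just_pressed(KEY_SHIFT):
		print("Shift was just pressed")
	if _event_helper.is_key_just_released(MOUSE_BUTTON_LEFT):
		print("Left mouse button was just released")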


@@ -0,0 +1,103 @@
@tool
extends Node
# Runs jobs during the physics step.
# Only supports raycasts for now, but can easily be adapted to handle
# the other types of queries.
signal job_completed
const MAX_PHYSICS_QUERIES_SETTING := "addons/proton_scatter/max_physics_queries_per_frame"
var _is_ready := false
var _job_in_progress := false
var _max_queries_per_frame := 400
var _main_thread_id: int
var _queries: Array
var _results: Array[Dictionary]
var _space_state: PhysicsDirectSpaceState3D
func _ready() -> void:
set_physics_process(false)
_main_thread_id = OS.get_thread_caller_id()
_is_ready = true
func _exit_tree():
if _job_in_progress:
_job_in_progress = false
job_completed.emit()
func execute(queries: Array) -> Array[Dictionary]:
if not _is_ready:
printerr("ProtonScatter error: Calling execute on a PhysicsHelper before it's ready, this should not happen.")
return []
# Don't execute physics queries if the node is not inside the tree.
# This avoids infinite loops, because _physics_process would never run.
# This happens when the Scatter node is removed while it performs a rebuild in a Thread.
if not is_inside_tree():
printerr("ProtonScatter error: Calling execute on a PhysicsHelper while the node is not inside the tree.")
return []
# Clear previous job if any
_queries.clear()
if _job_in_progress:
await _until(get_tree().physics_frame, func(): return _job_in_progress)
_results.clear()
_queries = queries
_max_queries_per_frame = ProjectSettings.get_setting(MAX_PHYSICS_QUERIES_SETTING, 500)
_job_in_progress = true
set_physics_process.bind(true).call_deferred()
await _until(job_completed, func(): return _job_in_progress, true)
return _results.duplicate()
func _physics_process(_delta: float) -> void:
if _queries.is_empty():
return
if not _space_state:
_space_state = get_tree().get_root().get_world_3d().get_direct_space_state()
var steps = min(_max_queries_per_frame, _queries.size())
for i in steps:
var q = _queries.pop_back()
var hit := _space_state.intersect_ray(q) # TODO: Add support for other operations
_results.push_back(hit)
if _queries.is_empty():
set_physics_process(false)
_results.reverse()
_job_in_progress = false
job_completed.emit()
func _in_main_thread() -> bool:
return OS.get_thread_caller_id() == _main_thread_id
func _until(s: Signal, callable: Callable, physics := false) -> void:
if _in_main_thread():
await s
return
# Called from a sub thread
var delay: int = 0
if physics:
delay = round(get_physics_process_delta_time() * 100.0)
else:
delay = round(get_process_delta_time() * 100.0)
while callable.call():
OS.delay_msec(delay)
if not is_inside_tree():
return
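# Illustrative usage sketch from a hypothetical consumer node (not part of the
# original file). The preload path and ray lengths are assumptions.
const PhysicsHelper := preload("res://addons/proton_scatter/src/common/physics_helper.gd")

func snap_points_to_ground(points: Array[Vector3]) -> void:
	var helper := PhysicsHelper.new()
	add_child(helper) # The helper must be in the tree to run its physics step
	var queries := []
	for p in points:
		# Cast a ray from 50 units above each point straight down to 50 units below it.
		queries.push_back(PhysicsRayQueryParameters3D.create(p + Vector3.UP * 50.0, p + Vector3.DOWN * 50.0))
	var results: Array[Dictionary] = await helper.execute(queries)
	for hit in results:
		if not hit.is_empty():
			print("Hit position: ", hit["position"])
	helper.queue_free()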


@@ -0,0 +1,490 @@
extends Node
# To prevent the other core scripts from becoming too large, some of their
# utility functions are written here (only the functions that don't disturb
# reading the core code, mostly data validation and other verbose checks).
const ProtonScatter := preload("../scatter.gd")
const ProtonScatterItem := preload("../scatter_item.gd")
const ModifierStack := preload("../stack/modifier_stack.gd")
### SCATTER UTILITY FUNCTIONS ###
# Make sure the output node exists. This is the parent node to
# everything generated by the Scatter node.
static func ensure_output_root_exists(s: ProtonScatter) -> void:
# Check if the node exists in the tree
if not s.output_root:
s.output_root = s.get_node_or_null("ScatterOutput")
# If the node is valid, end here
if is_instance_valid(s.output_root) and s.has_node(NodePath(s.output_root.name)):
enforce_output_root_owner(s)
return
# Some conditions are not met, cleanup and recreate the root
if s.output_root:
if s.has_node(NodePath(s.output_root.name)):
s.remove_child(s.output_root)
s.output_root.queue_free()
s.output_root = null
s.output_root = Marker3D.new()
s.output_root.name = "ScatterOutput"
s.add_child(s.output_root, true)
enforce_output_root_owner(s)
static func enforce_output_root_owner(s: ProtonScatter) -> void:
if is_instance_valid(s.output_root) and s.is_inside_tree():
if s.show_output_in_tree:
set_owner_recursive(s.output_root, s.get_tree().get_edited_scene_root())
else:
set_owner_recursive(s.output_root, null)
# TMP: Workaround to force the scene tree to update and take into account
# the owner changes. Otherwise it doesn't show until much later.
s.output_root.update_configuration_warnings()
# Item root is a Node3D placed as a child of the ScatterOutput node.
# Each ScatterItem has a corresponding output node, serving as a parent for
# the Multimeshes or duplicates generated by the Scatter node.
static func get_or_create_item_root(item: ProtonScatterItem) -> Node3D:
var s: ProtonScatter = item.get_parent()
ensure_output_root_exists(s)
var item_root: Node3D = s.output_root.get_node_or_null(NodePath(item.name))
if not item_root:
item_root = Node3D.new()
item_root.name = item.name
s.output_root.add_child(item_root, true)
if Engine.is_editor_hint():
item_root.owner = item.get_tree().get_edited_scene_root()
return item_root
static func get_or_create_multimesh(item: ProtonScatterItem, count: int) -> MultiMeshInstance3D:
var item_root := get_or_create_item_root(item)
var mmi: MultiMeshInstance3D = item_root.get_node_or_null("MultiMeshInstance3D")
if not mmi:
mmi = MultiMeshInstance3D.new()
mmi.set_name("MultiMeshInstance3D")
item_root.add_child(mmi, true)
mmi.set_owner(item_root.owner)
if not mmi.multimesh:
mmi.multimesh = MultiMesh.new()
var mesh_instance: MeshInstance3D = get_merged_meshes_from(item)
if not mesh_instance:
return null
mmi.position = Vector3.ZERO
mmi.material_override = get_final_material(item, mesh_instance)
mmi.set_cast_shadows_setting(item.override_cast_shadow)
mmi.multimesh.instance_count = 0 # Set this to zero or you can't change the other values
mmi.multimesh.mesh = mesh_instance.mesh
mmi.multimesh.transform_format = MultiMesh.TRANSFORM_3D
mmi.visibility_range_begin = item.visibility_range_begin
mmi.visibility_range_begin_margin = item.visibility_range_begin_margin
mmi.visibility_range_end = item.visibility_range_end
mmi.visibility_range_end_margin = item.visibility_range_end_margin
mmi.visibility_range_fade_mode = item.visibility_range_fade_mode
mmi.layers = item.visibility_layers
mmi.multimesh.instance_count = count
mesh_instance.queue_free()
return mmi
static func get_or_create_multimesh_chunk(item: ProtonScatterItem,
mesh_instance: MeshInstance3D,
index: Vector3i,
count: int)\
-> MultiMeshInstance3D:
var item_root := get_or_create_item_root(item)
var chunk_name = "MultiMeshInstance3D" + "_%s_%s_%s"%[index.x, index.y, index.z]
var mmi: MultiMeshInstance3D = item_root.get_node_or_null(chunk_name)
if not mesh_instance:
return null
if not mmi:
mmi = MultiMeshInstance3D.new()
mmi.set_name(chunk_name)
# If set_name is used after add_child, it is extremely slow.
# This doesn't make much sense, but it is definitely the case:
# about a 100x slowdown was observed in this situation.
item_root.add_child.bind(mmi, true).call_deferred()
if not mmi.multimesh:
mmi.multimesh = MultiMesh.new()
mmi.position = Vector3.ZERO
mmi.material_override = get_final_material(item, mesh_instance)
mmi.set_cast_shadows_setting(item.override_cast_shadow)
mmi.multimesh.instance_count = 0 # Set this to zero or you can't change the other values
mmi.multimesh.mesh = mesh_instance.mesh
mmi.multimesh.transform_format = MultiMesh.TRANSFORM_3D
mmi.visibility_range_begin = item.visibility_range_begin
mmi.visibility_range_begin_margin = item.visibility_range_begin_margin
mmi.visibility_range_end = item.visibility_range_end
mmi.visibility_range_end_margin = item.visibility_range_end_margin
mmi.visibility_range_fade_mode = item.visibility_range_fade_mode
mmi.layers = item.visibility_layers
mmi.multimesh.instance_count = count
return mmi
static func get_or_create_particles(item: ProtonScatterItem) -> GPUParticles3D:
var item_root := get_or_create_item_root(item)
var particles: GPUParticles3D = item_root.get_node_or_null("GPUParticles3D")
if not particles:
particles = GPUParticles3D.new()
particles.set_name("GPUParticles3D")
item_root.add_child(particles)
particles.set_owner(item_root.owner)
var mesh_instance: MeshInstance3D = get_merged_meshes_from(item)
if not mesh_instance:
return null
particles.material_override = get_final_material(item, mesh_instance)
particles.set_draw_pass_mesh(0, mesh_instance.mesh)
particles.position = Vector3.ZERO
particles.local_coords = true
particles.layers = item.visibility_layers
# Use the user provided material if it exists.
var process_material: Material = item.override_process_material
# Or load the default one if there's nothing.
if not process_material:
process_material = ShaderMaterial.new()
process_material.shader = preload("../particles/static.gdshader")
if process_material is ShaderMaterial:
process_material.set_shader_parameter("global_transform", item_root.get_global_transform())
particles.set_process_material(process_material)
# TMP: Workaround to get an infinite lifetime.
# Should be fine, but extensive testing is required.
# I can't get particles to restart when using emit_particle() from a script, so it's either
# that, or encoding the transform array in a texture and reading that data from the particle
# shader, which is significantly harder.
particles.lifetime = 1.79769e308
# Kill previous particles or new ones will not spawn.
particles.restart()
return particles
# Called from child nodes that affect the rebuild process (like ScatterShape).
# Usually, it would be the Scatter node's responsibility to listen to changes from
# its children, but keeping track of them is annoying (they can be moved from one
# Scatter node to another, put under the wrong node, or hit other edge cases).
# So instead, when a child changes, it notifies the parent Scatter node through
# this method.
static func request_parent_to_rebuild(node: Node, deferred := true) -> void:
var parent = node.get_parent()
if not parent or not parent.is_inside_tree():
return
if parent and parent is ProtonScatter:
if not parent.is_ready:
return
if deferred:
parent.rebuild.call_deferred(true)
else:
parent.rebuild(true)
### MESH UTILITY ###
# Recursively searches for all MeshInstance3D nodes in the node's children and
# returns them in an array. If the node itself is a MeshInstance3D, it is also
# added to the array.
static func get_all_mesh_instances_from(node: Node) -> Array[MeshInstance3D]:
var res: Array[MeshInstance3D] = []
if node is MeshInstance3D:
res.push_back(node)
for c in node.get_children():
res.append_array(get_all_mesh_instances_from(c))
return res
static func get_final_material(item: ProtonScatterItem, mi: MeshInstance3D) -> Material:
if item.override_material:
return item.override_material
if mi.material_override:
return mi.material_override
if mi.get_surface_override_material(0):
return mi.get_surface_override_material(0)
return null
# Merges all the MeshInstances from the local node tree into a single MeshInstance.
# /!\ This is a best-effort algorithm and will not work in some specific cases. /!\
#
# Mesh resources can have a maximum of 8 surfaces:
# + If fewer than 8 different surfaces are found across all the MeshInstances,
# this returns a single instance with all the surfaces.
#
# + If more than 8 surfaces are found but some share the same material,
# these surfaces are merged together if there are fewer than 8 unique materials.
#
# + If there are more than 8 unique materials, everything is merged into
# a single surface. Materials and custom data will NOT be preserved on the new mesh.
#
static func get_merged_meshes_from(item: ProtonScatterItem) -> MeshInstance3D:
if not item:
return null
var source: Node = item.get_item()
if not is_instance_valid(source):
return null
source.transform = Transform3D()
# Get all the mesh instances
var mesh_instances: Array[MeshInstance3D] = get_all_mesh_instances_from(source)
source.queue_free()
if mesh_instances.is_empty():
return null
# If there's only one mesh instance we can reuse it directly if the materials allow it.
if mesh_instances.size() == 1:
# Duplicate the meshinstance, not the mesh resource
var mi: MeshInstance3D = mesh_instances[0].duplicate()
# MI uses a material override, all surface materials will be ignored
if mi.material_override:
return mi
var surface_overrides_count := 0
for i in mi.get_surface_override_material_count():
if mi.get_surface_override_material(i):
surface_overrides_count += 1
# If there's one material override or less, no duplicate mesh is required.
if surface_overrides_count <= 1:
return mi
# Helper lambdas
var get_material_for_surface = func (mi: MeshInstance3D, idx: int) -> Material:
if mi.get_material_override():
return mi.get_material_override()
if mi.get_surface_override_material(idx):
return mi.get_surface_override_material(idx)
if mi.mesh is PrimitiveMesh:
return mi.mesh.get_material()
return mi.mesh.surface_get_material(idx)
# Count how many surfaces / materials there are in the source instances
var total_surfaces := 0
var surfaces_map := {}
# Key: Material
# data: Array[Dictionary]
# "surface": surface index
# "mesh_instance": parent mesh instance
for mi in mesh_instances:
if not mi.mesh:
continue # Should not happen
# Update the total surface count
var surface_count = mi.mesh.get_surface_count()
total_surfaces += surface_count
# Store surfaces in the material indexed dictionary
for surface_index in surface_count:
var material: Material = get_material_for_surface.call(mi, surface_index)
if not material in surfaces_map:
surfaces_map[material] = []
surfaces_map[material].push_back({
"surface": surface_index,
"mesh_instance": mi,
})
# ------
# 8 surfaces or fewer: merge into a single MeshInstance
# ------
if total_surfaces <= 8:
var mesh := ImporterMesh.new()
for mi in mesh_instances:
var inverse_transform := mi.transform.affine_inverse()
for surface_index in mi.mesh.get_surface_count():
# Retrieve surface data
var primitive_type = Mesh.PRIMITIVE_TRIANGLES
var format = 0
var arrays := mi.mesh.surface_get_arrays(surface_index)
if mi.mesh is ArrayMesh:
primitive_type = mi.mesh.surface_get_primitive_type(surface_index)
format = mi.mesh.surface_get_format(surface_index) # Preserve custom data format
# Update vertex position based on MeshInstance transform
var vertex_count = arrays[ArrayMesh.ARRAY_VERTEX].size()
var vertex: Vector3
for index in vertex_count:
vertex = arrays[ArrayMesh.ARRAY_VERTEX][index] * inverse_transform
arrays[ArrayMesh.ARRAY_VERTEX][index] = vertex
# Get the material if any
var material: Material = get_material_for_surface.call(mi, surface_index)
# Store updated surface data in the new mesh
mesh.add_surface(primitive_type, arrays, [], {}, material, "", format)
if item.lod_generate:
mesh.generate_lods(item.lod_merge_angle, item.lod_split_angle, [])
var instance := MeshInstance3D.new()
instance.mesh = mesh.get_mesh()
return instance
# ------
# Too many surfaces and materials, merge everything in a single one.
# ------
var total_unique_materials := surfaces_map.size()
if total_unique_materials > 8:
var surface_tool := SurfaceTool.new()
surface_tool.begin(Mesh.PRIMITIVE_TRIANGLES)
for mi in mesh_instances:
var mesh : Mesh = mi.mesh
for surface_i in mesh.get_surface_count():
surface_tool.append_from(mesh, surface_i, mi.transform)
var mesh := ImporterMesh.new()
mesh.add_surface(surface_tool.get_primitive_type(), surface_tool.commit_to_arrays())
if item.lod_generate:
mesh.generate_lods(item.lod_merge_angle, item.lod_split_angle, [])
var instance = MeshInstance3D.new()
instance.mesh = mesh.get_mesh()
return instance
# ------
# Merge surfaces grouped by their materials
# ------
var mesh := ImporterMesh.new()
for material in surfaces_map.keys():
var surface_tool := SurfaceTool.new()
surface_tool.begin(Mesh.PRIMITIVE_TRIANGLES)
var surfaces: Array = surfaces_map[material]
for data in surfaces:
var idx: int = data["surface"]
var mi: MeshInstance3D = data["mesh_instance"]
surface_tool.append_from(mi.mesh, idx, mi.transform)
mesh.add_surface(
surface_tool.get_primitive_type(),
surface_tool.commit_to_arrays(),
[], {},
material)
if item.lod_generate:
mesh.generate_lods(item.lod_merge_angle, item.lod_split_angle, [])
var instance := MeshInstance3D.new()
instance.mesh = mesh.get_mesh()
return instance
static func get_all_static_bodies_from(node: Node) -> Array[StaticBody3D]:
var res: Array[StaticBody3D] = []
if node is StaticBody3D:
res.push_back(node)
for c in node.get_children():
res.append_array(get_all_static_bodies_from(c))
return res
# Grab every static body from the source item and merge them into a single
# one with multiple collision shapes.
static func get_collision_data(item: ProtonScatterItem) -> StaticBody3D:
var static_body := StaticBody3D.new()
var source: Node3D = item.get_item()
if not is_instance_valid(source):
return static_body
source.transform = Transform3D()
for body in get_all_static_bodies_from(source):
for child in body.get_children():
if child is CollisionShape3D:
# Don't use reparent() here or the child transform gets reset.
body.remove_child(child)
child.owner = null
static_body.add_child(child)
source.queue_free()
return static_body
static func set_owner_recursive(node: Node, new_owner) -> void:
node.set_owner(new_owner)
if not node.get_scene_file_path().is_empty():
return # Node is an instantiated scene, don't change its children owner.
for c in node.get_children():
set_owner_recursive(c, new_owner)
static func get_aabb_from_transforms(transforms : Array) -> AABB:
if transforms.size() < 1:
return AABB(Vector3.ZERO, Vector3.ZERO)
var aabb = AABB(transforms[0].origin, Vector3.ZERO)
for t in transforms:
aabb = aabb.expand(t.origin)
return aabb
static func set_visibility_layers(node: Node, layers: int) -> void:
if node is VisualInstance3D:
node.layers = layers
for child in node.get_children():
set_visibility_layers(child, layers)
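# Illustrative usage sketch from a hypothetical consumer script (not part of the
# original file). The preload path is an assumption.
const ScatterUtil := preload("res://addons/proton_scatter/src/common/scatter_util.gd")

func util_example(branch_root: Node, transforms: Array) -> void:
	# Bounding box enclosing the origin of every transform in the array.
	var aabb := ScatterUtil.get_aabb_from_transforms(transforms)
	print("AABB position: ", aabb.position, " - size: ", aabb.size)
	# Apply the same visibility layers to every VisualInstance3D under branch_root.
	ScatterUtil.set_visibility_layers(branch_root, 1 << 2)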


@@ -0,0 +1,66 @@
@tool
extends RefCounted
var list: Array[Transform3D] = []
var max_count := -1
func add(count: int) -> void:
for i in count:
var t := Transform3D()
list.push_back(t)
func append(array: Array[Transform3D]) -> void:
list.append_array(array)
func remove(count: int) -> void:
count = int(max(count, 0)) # Prevent using a negative number
var new_size = max(list.size() - count, 0)
list.resize(new_size)
func resize(count: int) -> void:
if max_count >= 0:
count = int(min(count, max_count))
var current_count = list.size()
if count > current_count:
add(count - current_count)
else:
remove(current_count - count)
# TODO: A faster algorithm probably exists for this; research alternatives
# if this ever becomes a performance bottleneck.
func shuffle(random_seed := 0) -> void:
var n = list.size()
if n < 2:
return
var rng = RandomNumberGenerator.new()
rng.set_seed(random_seed)
var i = n - 1
var j
var tmp
while i >= 1:
j = rng.randi() % (i + 1)
tmp = list[j]
list[j] = list[i]
list[i] = tmp
i -= 1
func clear() -> void:
list = []
func is_empty() -> bool:
return list.is_empty()
func size() -> int:
return list.size()
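# Illustrative usage sketch from a hypothetical modifier script (not part of the
# original file). The preload path is an assumption.
const TransformList := preload("res://addons/proton_scatter/src/common/transform_list.gd")

func transform_list_example() -> void:
	var transforms := TransformList.new()
	transforms.max_count = 100
	transforms.resize(500) # Clamped to 100 because of max_count
	print(transforms.size()) # 100
	transforms.shuffle(1234) # Deterministic for a given seed
	transforms.remove(25)
	print(transforms.size()) # 75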


@@ -0,0 +1,29 @@
@tool
extends RefCounted
static func get_position_and_normal_at(curve: Curve3D, offset: float) -> Array:
if not curve:
return []
var pos: Vector3 = curve.sample_baked(offset)
var normal := Vector3.ZERO
var pos1
if offset + curve.get_bake_interval() < curve.get_baked_length():
pos1 = curve.sample_baked(offset + curve.get_bake_interval())
normal = (pos1 - pos)
else:
pos1 = curve.sample_baked(offset - curve.get_bake_interval())
normal = (pos - pos1)
return [pos, normal]
static func remove_line_breaks(text: String) -> String:
# Remove tabs
text = text.replace("\t", "")
# Remove line breaks
text = text.replace("\n", " ")
# Remove occasional double space caused by the line above
return text.replace(" ", " ")