diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
index 032e3eda0..88d22ecbd 100644
--- a/docs/CHANGELOG.md
+++ b/docs/CHANGELOG.md
@@ -113,3 +113,11 @@ v1.8.2
- Add smbpy du and df commands
- Fix fineterrain not included in export for optimize_diskusage=True
- Update mesher_backend config name & default commands
+
+v1.8.3
+- Fix landlab import error message, add no_landlab.gin config
+
+v1.9.0
+- Add CoPlanar indoor constraint, fix backwards tvs/monitors/sinks
+- Fix empty scene / null objects selected during export
+- Add full system visual check / integration script
\ No newline at end of file
diff --git a/infinigen/__init__.py b/infinigen/__init__.py
index 08131be6f..968900e72 100644
--- a/infinigen/__init__.py
+++ b/infinigen/__init__.py
@@ -6,7 +6,7 @@
import logging
from pathlib import Path
-__version__ = "1.8.3"
+__version__ = "1.9.0"
def repo_root():
diff --git a/infinigen/assets/materials/art.py b/infinigen/assets/materials/art.py
index d03d90d11..c82f33432 100644
--- a/infinigen/assets/materials/art.py
+++ b/infinigen/assets/materials/art.py
@@ -78,9 +78,6 @@ def shader_art_composite(nw: NodeWrangler, **kwargs):
return shader_art_composite
- def make_sphere(self):
- return make_sphere()
-
class ArtRug(ArtComposite):
@property
@@ -98,7 +95,3 @@ def apply(obj, selection=None, bbox=(0, 1, 0, 1), scale=None, **kwargs):
if scale is not None:
write_uv(obj, read_uv(obj) * scale)
Art(np.random.randint(1e5)).apply(obj, selection, bbox, **kwargs)
-
-
-def make_sphere():
- return text.make_sphere()
diff --git a/infinigen/core/constraints/constraint_language/__init__.py b/infinigen/core/constraints/constraint_language/__init__.py
index 464fd0a15..e6d445d2a 100644
--- a/infinigen/core/constraints/constraint_language/__init__.py
+++ b/infinigen/core/constraints/constraint_language/__init__.py
@@ -44,6 +44,7 @@
from .relations import (
AnyRelation,
ConnectorType,
+ CoPlanar,
CutFrom,
GeometryRelation,
NegatedRelation,
diff --git a/infinigen/core/constraints/constraint_language/relations.py b/infinigen/core/constraints/constraint_language/relations.py
index 636f9e969..f6cf9c5ee 100644
--- a/infinigen/core/constraints/constraint_language/relations.py
+++ b/infinigen/core/constraints/constraint_language/relations.py
@@ -386,6 +386,17 @@ class SupportedBy(Touching):
__repr__ = no_frozenset_repr
+@dataclass(frozen=True)
+class CoPlanar(GeometryRelation):
+ margin: float = 0
+
+ # rev_normal: if True, align the normals so they face the SAME direction, rather than two planes facing each other.
+ # typical use is for sink embedded in countertop
+ rev_normal: bool = False
+
+ __repr__ = no_frozenset_repr
+
+
@dataclass(frozen=True)
class StableAgainst(GeometryRelation):
margin: float = 0
@@ -394,6 +405,8 @@ class StableAgainst(GeometryRelation):
# typical use is chair-against-table relation
check_z: bool = True
+ rev_normal: bool = False
+
__repr__ = no_frozenset_repr
diff --git a/infinigen/core/constraints/constraint_language/util.py b/infinigen/core/constraints/constraint_language/util.py
index 390893d9e..a72933039 100644
--- a/infinigen/core/constraints/constraint_language/util.py
+++ b/infinigen/core/constraints/constraint_language/util.py
@@ -213,30 +213,21 @@ def delete_obj(scene, a, delete_blender=True):
scene.delete_geometry(obj_name + "_mesh")
-def global_vertex_coordinates(obj, local_vertex) -> Vector:
- return obj.matrix_world @ local_vertex.co
-
-
-def global_polygon_normal(obj, polygon):
- loc, rot, scale = obj.matrix_world.decompose()
- rot = rot.to_matrix()
- normal = rot @ polygon.normal
- return normal / np.linalg.norm(normal)
-
-
def is_planar(obj, tolerance=1e-6):
if len(obj.data.polygons) != 1:
return False
polygon = obj.data.polygons[0]
- global_normal = global_polygon_normal(obj, polygon)
+ global_normal = butil.global_polygon_normal(obj, polygon)
# Take the first vertex as a reference point on the plane
- ref_vertex = global_vertex_coordinates(obj, obj.data.vertices[polygon.vertices[0]])
+ ref_vertex = butil.global_vertex_coordinates(
+ obj, obj.data.vertices[polygon.vertices[0]]
+ )
# Check if all vertices lie on the plane defined by the reference vertex and the global normal
for vertex in obj.data.vertices:
- distance = (global_vertex_coordinates(obj, vertex) - ref_vertex).dot(
+ distance = (butil.global_vertex_coordinates(obj, vertex) - ref_vertex).dot(
global_normal
)
if not math.isclose(distance, 0, abs_tol=tolerance):
@@ -253,8 +244,12 @@ def planes_parallel(plane_obj_a, plane_obj_b, tolerance=1e-6):
# if not is_planar(plane_obj_a) or not is_planar(plane_obj_b):
# raise ValueError("One or both objects are not planar")
- global_normal_a = global_polygon_normal(plane_obj_a, plane_obj_a.data.polygons[0])
- global_normal_b = global_polygon_normal(plane_obj_b, plane_obj_b.data.polygons[0])
+ global_normal_a = butil.global_polygon_normal(
+ plane_obj_a, plane_obj_a.data.polygons[0]
+ )
+ global_normal_b = butil.global_polygon_normal(
+ plane_obj_b, plane_obj_b.data.polygons[0]
+ )
dot_product = global_normal_a.dot(global_normal_b)
diff --git a/infinigen/core/constraints/evaluator/indoor_util.py b/infinigen/core/constraints/evaluator/indoor_util.py
index df5a2a701..aeef60dd4 100644
--- a/infinigen/core/constraints/evaluator/indoor_util.py
+++ b/infinigen/core/constraints/evaluator/indoor_util.py
@@ -12,6 +12,8 @@
import trimesh
from shapely import LineString, Point
+from infinigen.core.util import blender as butil
+
def meshes_from_names(scene, names):
if isinstance(names, str):
@@ -172,30 +174,21 @@ def delete_obj(a, scene=None):
scene.delete_geometry(obj_name + "_mesh")
-def global_vertex_coordinates(obj, local_vertex):
- return obj.matrix_world @ local_vertex.co
-
-
-def global_polygon_normal(obj, polygon):
- loc, rot, scale = obj.matrix_world.decompose()
- rot = rot.to_matrix()
- normal = rot @ polygon.normal
- return normal / np.linalg.norm(normal)
-
-
def is_planar(obj, tolerance=1e-6):
if len(obj.data.polygons) != 1:
return False
polygon = obj.data.polygons[0]
- global_normal = global_polygon_normal(obj, polygon)
+ global_normal = butil.global_polygon_normal(obj, polygon)
# Take the first vertex as a reference point on the plane
- ref_vertex = global_vertex_coordinates(obj, obj.data.vertices[polygon.vertices[0]])
+ ref_vertex = butil.global_vertex_coordinates(
+ obj, obj.data.vertices[polygon.vertices[0]]
+ )
# Check if all vertices lie on the plane defined by the reference vertex and the global normal
for vertex in obj.data.vertices:
- distance = (global_vertex_coordinates(obj, vertex) - ref_vertex).dot(
+ distance = (butil.global_vertex_coordinates(obj, vertex) - ref_vertex).dot(
global_normal
)
if not math.isclose(distance, 0, abs_tol=tolerance):
@@ -212,8 +205,12 @@ def planes_parallel(plane_obj_a, plane_obj_b, tolerance=1e-6):
# if not is_planar(plane_obj_a) or not is_planar(plane_obj_b):
# raise ValueError("One or both objects are not planar")
- global_normal_a = global_polygon_normal(plane_obj_a, plane_obj_a.data.polygons[0])
- global_normal_b = global_polygon_normal(plane_obj_b, plane_obj_b.data.polygons[0])
+ global_normal_a = butil.global_polygon_normal(
+ plane_obj_a, plane_obj_a.data.polygons[0]
+ )
+ global_normal_b = butil.global_polygon_normal(
+ plane_obj_b, plane_obj_b.data.polygons[0]
+ )
dot_product = global_normal_a.dot(global_normal_b)
@@ -230,12 +227,12 @@ def distance_to_plane(point, plane_point, plane_normal):
def is_within_margin_from_plane(obj, obj_b, margin, tol=1e-6):
"""Check if all vertices of an object are within a given margin from a plane."""
polygon_b = obj_b.data.polygons[0]
- plane_point_b = global_vertex_coordinates(
+ plane_point_b = butil.global_vertex_coordinates(
obj_b, obj_b.data.vertices[polygon_b.vertices[0]]
)
- plane_normal_b = global_polygon_normal(obj_b, polygon_b)
+ plane_normal_b = butil.global_polygon_normal(obj_b, polygon_b)
for vertex in obj.data.vertices:
- global_vertex = global_vertex_coordinates(obj, vertex)
+ global_vertex = butil.global_vertex_coordinates(obj, vertex)
distance = distance_to_plane(global_vertex, plane_point_b, plane_normal_b)
if not math.isclose(distance, margin, abs_tol=tol):
return False
diff --git a/infinigen/core/constraints/evaluator/node_impl/trimesh_geometry.py b/infinigen/core/constraints/evaluator/node_impl/trimesh_geometry.py
index aaf03f759..de1a2c3da 100644
--- a/infinigen/core/constraints/evaluator/node_impl/trimesh_geometry.py
+++ b/infinigen/core/constraints/evaluator/node_impl/trimesh_geometry.py
@@ -31,10 +31,9 @@
from infinigen.core import tags as t
from infinigen.core.constraints.example_solver import state_def
from infinigen.core.constraints.example_solver.geometry.parse_scene import add_to_scene
+from infinigen.core.util import blender as butil
from infinigen.core.util.logging import lazydebug
-# from infinigen.core.util import blender as butil
-
# import fcl
@@ -85,10 +84,10 @@ def get_axis(state: state_def.State, obj: bpy.types.Object, tag=t.Subpart.Front)
a_front_plane = a_front_planes[0]
a_front_plane_ind = a_front_plane[1]
a_poly = obj.data.polygons[a_front_plane_ind]
- front_plane_pt = iu.global_vertex_coordinates(
+ front_plane_pt = butil.global_vertex_coordinates(
obj, obj.data.vertices[a_poly.vertices[0]]
)
- front_plane_normal = iu.global_polygon_normal(obj, a_poly)
+ front_plane_normal = butil.global_polygon_normal(obj, a_poly)
return front_plane_pt, front_plane_normal
diff --git a/infinigen/core/constraints/example_solver/geometry/dof.py b/infinigen/core/constraints/example_solver/geometry/dof.py
index 0faa2a4f7..25e2584c9 100644
--- a/infinigen/core/constraints/example_solver/geometry/dof.py
+++ b/infinigen/core/constraints/example_solver/geometry/dof.py
@@ -107,8 +107,18 @@ def rotate_object_around_axis(obj, axis, std, angle=None):
def check_init_valid(
- state: state_def.State, name: str, obj_planes: list, assigned_planes: list, margins
+ state: state_def.State,
+ name: str,
+ obj_planes: list,
+ assigned_planes: list,
+ margins: list,
+ rev_normals: list[bool],
):
+ """
+ Check that the plane assignments to the object is valid. First checks that the rotations can be satisfied, then
+ checks that the translations can be satisfied. Returns a boolean indicating if the assignments are valid, the number
+ of degrees of freedom remaining, and the translation vector if the assignments are valid.
+ """
if len(obj_planes) == 0:
raise ValueError(f"{check_init_valid.__name__} for {name=} got {obj_planes=}")
if len(obj_planes) > 3:
@@ -117,6 +127,9 @@ def check_init_valid(
)
def get_rot(ind):
+ """
+ Get the rotation axis and angle needed to align the object's plane with the assigned plane.
+ """
try:
a = obj_planes[ind][0]
b = assigned_planes[ind][0]
@@ -125,6 +138,7 @@ def get_rot(ind):
a_plane = obj_planes[ind]
b_plane = assigned_planes[ind]
+ rev_normal = rev_normals[ind]
a_obj = bpy.data.objects[a]
b_obj = bpy.data.objects[b]
@@ -132,8 +146,8 @@ def get_rot(ind):
a_poly = a_obj.data.polygons[a_poly_index]
b_poly_index = b_plane[1]
b_poly = b_obj.data.polygons[b_poly_index]
- plane_normal_a = iu.global_polygon_normal(a_obj, a_poly)
- plane_normal_b = iu.global_polygon_normal(b_obj, b_poly)
+ plane_normal_a = butil.global_polygon_normal(a_obj, a_poly)
+ plane_normal_b = butil.global_polygon_normal(b_obj, b_poly, rev_normal)
plane_normal_b = -plane_normal_b
rotation_axis = np.cross(plane_normal_a, plane_normal_b)
@@ -192,6 +206,7 @@ def is_rotation_allowed(rotation_axis, reference_normal):
a_obj_name, a_poly_index = obj_planes[i]
b_obj_name, b_poly_index = assigned_planes[i]
margin = margins[i]
+ rev_normal = rev_normals[i]
a_obj = bpy.data.objects[a_obj_name]
b_obj = bpy.data.objects[b_obj_name]
@@ -200,13 +215,13 @@ def is_rotation_allowed(rotation_axis, reference_normal):
b_poly = b_obj.data.polygons[b_poly_index]
# Get global coordinates and normals
- plane_point_a = iu.global_vertex_coordinates(
+ plane_point_a = butil.global_vertex_coordinates(
a_obj, a_obj.data.vertices[a_poly.vertices[0]]
)
- plane_point_b = iu.global_vertex_coordinates(
+ plane_point_b = butil.global_vertex_coordinates(
b_obj, b_obj.data.vertices[b_poly.vertices[0]]
)
- plane_normal_b = iu.global_polygon_normal(b_obj, b_poly)
+ plane_normal_b = butil.global_polygon_normal(b_obj, b_poly, rev_normal)
plane_point_b += plane_normal_b * margin
# Append to the matrix A and vector b for Ax = c
@@ -251,20 +266,31 @@ def apply_relations_surfacesample(
state: state_def.State,
name: str,
):
+ """
+ Apply the relation constraints to the object. Place it in the scene according to the constraints.
+ """
+
obj_state = state.objs[name]
obj_name = obj_state.obj.name
+ def relation_sort_key(relation_state):
+ return isinstance(relation_state.relation, cl.CoPlanar)
+
+ obj_state.relations = sorted(obj_state.relations, key=relation_sort_key)
+
parent_objs = []
parent_planes = []
obj_planes = []
margins = []
parent_tag_list = []
+ relations = []
+ rev_normals = []
if len(obj_state.relations) == 0:
raise ValueError(f"Object {name} has no relations")
elif len(obj_state.relations) > 3:
raise ValueError(
- f"Object {name} has more than 2 relations, not supported. {obj_state.relations=}"
+ f"Object {name} has more than 3 relations, not supported. {obj_state.relations=}"
)
for i, relation_state in enumerate(obj_state.relations):
@@ -287,16 +313,34 @@ def apply_relations_surfacesample(
parent_planes.append(parent_plane)
parent_objs.append(parent_obj)
match relation_state.relation:
- case cl.StableAgainst(_child_tags, parent_tags, margin):
+ case cl.StableAgainst(
+ _child_tags, parent_tags, margin, _check_z, rev_normal
+ ):
margins.append(margin)
parent_tag_list.append(parent_tags)
+ relations.append(relation_state.relation)
+ rev_normals.append(rev_normal)
case cl.SupportedBy(_parent_tags, parent_tags):
margins.append(0)
parent_tag_list.append(parent_tags)
+ relations.append(relation_state.relation)
+ rev_normals.append(False)
+ case cl.CoPlanar(_child_tags, parent_tags, margin, rev_normal):
+ margins.append(margin)
+ parent_tag_list.append(parent_tags)
+ relations.append(relation_state.relation)
+ rev_normals.append(rev_normal)
+ case cl.Touching(_child_tags, parent_tags, margin):
+ margins.append(margin)
+ parent_tag_list.append(parent_tags)
+ relations.append(relation_state.relation)
+ rev_normals.append(False)
case _:
raise NotImplementedError
- valid, dof, T = check_init_valid(state, name, obj_planes, parent_planes, margins)
+ valid, dof, T = check_init_valid(
+ state, name, obj_planes, parent_planes, margins, rev_normals
+ )
if not valid:
rels = [(rels.relation, rels.target_name) for rels in obj_state.relations]
logger.warning(f"Init was invalid for {name=} {rels=}")
@@ -317,6 +361,9 @@ def apply_relations_surfacesample(
margin2 = margins[1]
obj_plane1 = obj_planes[0]
obj_plane2 = obj_planes[1]
+ relation2 = relations[1]
+ rev_normal1 = rev_normals[0]
+ rev_normal2 = rev_normals[1]
parent1_trimesh = state.planes.get_tagged_submesh(
state.trimesh_scene, parent_obj1.name, parent_tags1, parent_plane1
@@ -327,7 +374,7 @@ def apply_relations_surfacesample(
parent1_poly_index = parent_plane1[1]
parent1_poly = parent_obj1.data.polygons[parent1_poly_index]
- plane_normal_1 = iu.global_polygon_normal(parent_obj1, parent1_poly)
+ plane_normal_1 = butil.global_polygon_normal(parent_obj1, parent1_poly)
pts = parent2_trimesh.vertices
projected = project(pts, plane_normal_1)
p1_to_p1 = trimesh.path.polygons.projected(
@@ -341,7 +388,7 @@ def apply_relations_surfacesample(
if all(
[p1_to_p1.buffer(1e-1).contains(Point(pt[0], pt[1])) for pt in projected]
- ):
+ ) and (not isinstance(relation2, cl.CoPlanar)):
face_mask = tagging.tagged_face_mask(parent_obj2, parent_tags2)
stability.move_obj_random_pt(
state, obj_name, parent_obj2.name, face_mask, parent_plane2
@@ -353,6 +400,7 @@ def apply_relations_surfacesample(
obj_plane2,
parent_plane2,
margin=margin2,
+ rev_normal=rev_normal2,
)
stability.snap_against(
state.trimesh_scene,
@@ -361,6 +409,7 @@ def apply_relations_surfacesample(
obj_plane1,
parent_plane1,
margin=margin1,
+ rev_normal=rev_normal1,
)
else:
face_mask = tagging.tagged_face_mask(parent_obj1, parent_tags1)
@@ -374,6 +423,7 @@ def apply_relations_surfacesample(
obj_plane1,
parent_plane1,
margin=margin1,
+ rev_normal=rev_normal1,
)
stability.snap_against(
state.trimesh_scene,
@@ -382,10 +432,11 @@ def apply_relations_surfacesample(
obj_plane2,
parent_plane2,
margin=margin2,
+ rev_normal=rev_normal2,
)
elif dof == 2:
- assert len(parent_planes) == 1, (name, len(parent_planes))
+ # assert len(parent_planes) == 1, (name, len(parent_planes))
for i, relation_state in enumerate(obj_state.relations):
parent_obj = state.objs[relation_state.target_name].obj
obj_plane, parent_plane = state.planes.get_rel_state_planes(
@@ -403,11 +454,23 @@ def apply_relations_surfacesample(
face_mask = tagging.tagged_face_mask(
parent_obj, relation_state.relation.parent_tags
)
- stability.move_obj_random_pt(
- state, obj_name, parent_obj.name, face_mask, parent_plane
- )
+
match relation_state.relation:
- case cl.StableAgainst(_, parent_tags, margin):
+ case cl.CoPlanar:
+ stability.snap_against(
+ state.trimesh_scene,
+ obj_name,
+ parent_obj.name,
+ obj_plane,
+ parent_plane,
+ margin=margin,
+ rev_normal=relation_state.relation.rev_normal,
+ )
+
+ case cl.StableAgainst(_, parent_tags, margin, _check_z, rev_normal):
+ stability.move_obj_random_pt(
+ state, obj_name, parent_obj.name, face_mask, parent_plane
+ )
stability.snap_against(
state.trimesh_scene,
obj_name,
@@ -415,8 +478,13 @@ def apply_relations_surfacesample(
obj_plane,
parent_plane,
margin=margin,
+ rev_normal=rev_normal,
)
+
case cl.SupportedBy(_, parent_tags):
+ stability.move_obj_random_pt(
+ state, obj_name, parent_obj.name, face_mask, parent_plane
+ )
stability.snap_against(
state.trimesh_scene,
obj_name,
@@ -424,6 +492,7 @@ def apply_relations_surfacesample(
obj_plane,
parent_plane,
margin=0,
+ rev_normal=False,
)
case _:
raise NotImplementedError
@@ -470,6 +539,7 @@ def try_apply_relation_constraints(
if visualize:
vis = butil.copy(obj_state.obj)
vis.name = obj_state.obj.name[:30] + "_noneplanes_" + str(retry)
+ butil.save_blend("test.blend")
return False
if validity.check_post_move_validity(state, name):
@@ -480,8 +550,7 @@ def try_apply_relation_constraints(
if visualize:
vis = butil.copy(obj_state.obj)
vis.name = obj_state.obj.name[:30] + "_failure_" + str(retry)
-
- # butil.save_blend("test.blend")
+ butil.save_blend("test.blend")
logger.debug(f"Exhausted {n_try_resolve=} tries for {name=}")
return False
diff --git a/infinigen/core/constraints/example_solver/geometry/planes.py b/infinigen/core/constraints/example_solver/geometry/planes.py
index e01870e93..0c9b3a600 100644
--- a/infinigen/core/constraints/example_solver/geometry/planes.py
+++ b/infinigen/core/constraints/example_solver/geometry/planes.py
@@ -23,22 +23,6 @@
logger = logging.getLogger(__name__)
-def global_vertex_coordinates(obj, local_vertex):
- return obj.matrix_world @ local_vertex.co
-
-
-def global_polygon_normal(obj, polygon):
- loc, rot, scale = obj.matrix_world.decompose()
- rot = rot.to_matrix()
- normal = rot @ polygon.normal
- try:
- return normal / np.linalg.norm(normal)
- except ZeroDivisionError:
- raise ZeroDivisionError(
- f"Zero division error in global_polygon_normal for {obj.name=}, {polygon.index=}, {normal=}"
- )
-
-
class Planes:
def __init__(self):
self._mesh_hashes = {} # Dictionary to store mesh hashes for each object
@@ -97,10 +81,10 @@ def compute_all_planes_fast(self, obj, face_mask, tolerance=1e-4):
# Cache computations
vertex_cache = {
- v.index: global_vertex_coordinates(obj, v) for v in obj.data.vertices
+ v.index: butil.global_vertex_coordinates(obj, v) for v in obj.data.vertices
}
normal_cache = {
- p.index: global_polygon_normal(obj, p)
+ p.index: butil.global_polygon_normal(obj, p)
for p in obj.data.polygons
if face_mask[p.index]
}
@@ -136,17 +120,17 @@ def get_all_planes_deprecated(
for polygon in obj.data.polygons:
if not face_mask[polygon.index]:
continue
- vertex = global_vertex_coordinates(
+ vertex = butil.global_vertex_coordinates(
obj, obj.data.vertices[polygon.vertices[0]]
)
- normal = global_polygon_normal(obj, polygon)
+ normal = butil.global_polygon_normal(obj, polygon)
belongs_to_existing_plane = False
for name, polygon2_index in unique_planes:
polygon2 = obj.data.polygons[polygon2_index]
- plane_vertex = global_vertex_coordinates(
+ plane_vertex = butil.global_vertex_coordinates(
obj, obj.data.vertices[polygon2.vertices[0]]
)
- plane_normal = global_polygon_normal(obj, polygon2)
+ plane_normal = butil.global_polygon_normal(obj, polygon2)
if np.allclose(
np.cross(normal, plane_normal), 0, rtol=tolerance
) and np.allclose(
@@ -291,10 +275,10 @@ def tagged_plane_mask(
current_hash = self.calculate_mesh_hash(obj) # Calculate current mesh hash
face_mask_hash = self.hash_face_mask(face_mask) # Calculate hash for face_mask
ref_poly = self.planerep_to_poly(plane)
- ref_vertex = global_vertex_coordinates(
+ ref_vertex = butil.global_vertex_coordinates(
obj, obj.data.vertices[ref_poly.vertices[0]]
)
- ref_normal = global_polygon_normal(obj, ref_poly)
+ ref_normal = butil.global_polygon_normal(obj, ref_poly)
plane_hash = self.hash_plane(
ref_normal, ref_vertex, hash_tolerance
) # Calculate hash for plane
@@ -334,19 +318,19 @@ def _compute_tagged_plane_mask(self, obj, face_mask, plane, tolerance):
"""
plane_mask = np.zeros(len(obj.data.polygons), dtype=bool)
ref_poly = self.planerep_to_poly(plane)
- ref_vertex = global_vertex_coordinates(
+ ref_vertex = butil.global_vertex_coordinates(
obj, obj.data.vertices[ref_poly.vertices[0]]
)
- ref_normal = global_polygon_normal(obj, ref_poly)
+ ref_normal = butil.global_polygon_normal(obj, ref_poly)
for candidate_polygon in obj.data.polygons:
if not face_mask[candidate_polygon.index]:
continue
- candidate_vertex = global_vertex_coordinates(
+ candidate_vertex = butil.global_vertex_coordinates(
obj, obj.data.vertices[candidate_polygon.vertices[0]]
)
- candidate_normal = global_polygon_normal(obj, candidate_polygon)
+ candidate_normal = butil.global_polygon_normal(obj, candidate_polygon)
diff_vec = ref_vertex - candidate_vertex
if not np.isclose(np.linalg.norm(diff_vec), 0):
diff_vec /= np.linalg.norm(diff_vec)
diff --git a/infinigen/core/constraints/example_solver/geometry/stability.py b/infinigen/core/constraints/example_solver/geometry/stability.py
index c42b28b1d..5d76c0af0 100644
--- a/infinigen/core/constraints/example_solver/geometry/stability.py
+++ b/infinigen/core/constraints/example_solver/geometry/stability.py
@@ -102,7 +102,7 @@ def stable_against(
relation = relation_state.relation
assert isinstance(relation, cl.StableAgainst)
- logger.debug(f"stable against {obj_name=} {relation_state=}")
+ logger.debug(f"stable against {obj_name=} {relation_state=} {relation.rev_normal=}")
a_blender_obj = state.objs[obj_name].obj
b_blender_obj = state.objs[relation_state.target_name].obj
sa = state.objs[obj_name]
@@ -113,14 +113,16 @@ def stable_against(
poly_a = state.planes.planerep_to_poly(pa)
poly_b = state.planes.planerep_to_poly(pb)
- normal_a = iu.global_polygon_normal(a_blender_obj, poly_a)
- normal_b = iu.global_polygon_normal(b_blender_obj, poly_b)
+ normal_a = butil.global_polygon_normal(a_blender_obj, poly_a)
+ normal_b = butil.global_polygon_normal(
+ b_blender_obj, poly_b, rev_normal=relation.rev_normal
+ )
dot = np.array(normal_a).dot(normal_b)
if not (np.isclose(np.abs(dot), 1, atol=1e-2) or np.isclose(dot, -1, atol=1e-2)):
logger.debug(f"stable against failed, not parallel {dot=}")
return False
- origin_b = iu.global_vertex_coordinates(
+ origin_b = butil.global_vertex_coordinates(
b_blender_obj, b_blender_obj.data.vertices[poly_b.vertices[0]]
)
@@ -166,7 +168,7 @@ def stable_against(
return False
for vertex in poly_a.vertices:
- vertex_global = iu.global_vertex_coordinates(
+ vertex_global = butil.global_vertex_coordinates(
a_blender_obj, a_blender_obj.data.vertices[vertex]
)
distance = iu.distance_to_plane(vertex_global, origin_b, normal_b)
@@ -177,11 +179,58 @@ def stable_against(
return True
-def snap_against(scene, a, b, a_plane, b_plane, margin=0):
+@gin.configurable
+def coplanar(
+ state: state_def.State,
+ obj_name: str,
+ relation_state: state_def.RelationState,
+):
+ """
+ check that the object's tagged surface is coplanar with the target object's tagged surface translated with margin.
+ """
+
+ relation = relation_state.relation
+ assert isinstance(relation, cl.CoPlanar)
+
+ logger.debug(f"coplanar {obj_name=} {relation_state=}")
+ a_blender_obj = state.objs[obj_name].obj
+ b_blender_obj = state.objs[relation_state.target_name].obj
+
+ pa, pb = state.planes.get_rel_state_planes(state, obj_name, relation_state)
+
+ poly_a = state.planes.planerep_to_poly(pa)
+ poly_b = state.planes.planerep_to_poly(pb)
+
+ normal_a = butil.global_polygon_normal(a_blender_obj, poly_a)
+ normal_b = butil.global_polygon_normal(
+ b_blender_obj, poly_b, rev_normal=relation.rev_normal
+ )
+ dot = np.array(normal_a).dot(normal_b)
+ if not (np.isclose(np.abs(dot), 1, atol=1e-2) or np.isclose(dot, -1, atol=1e-2)):
+ logger.debug(f"coplanar failed, not parallel {dot=}")
+ return False
+
+ origin_b = butil.global_vertex_coordinates(
+ b_blender_obj, b_blender_obj.data.vertices[poly_b.vertices[0]]
+ )
+
+ for vertex in poly_a.vertices:
+ vertex_global = butil.global_vertex_coordinates(
+ a_blender_obj, a_blender_obj.data.vertices[vertex]
+ )
+ distance = iu.distance_to_plane(vertex_global, origin_b, normal_b)
+ if not np.isclose(distance, relation_state.relation.margin, atol=1e-2):
+ logger.debug(f"coplanar failed, not close to {distance=}")
+ return False
+
+ return True
+
+
+def snap_against(scene, a, b, a_plane, b_plane, margin=0, rev_normal=False):
"""
snap a against b with some margin.
"""
- logging.debug("snap_against", a, b, a_plane, b_plane, margin)
+ logging.debug("snap_against", a, b, a_plane, b_plane, margin, rev_normal)
a_obj = bpy.data.objects[a]
b_obj = bpy.data.objects[b]
@@ -190,14 +239,14 @@ def snap_against(scene, a, b, a_plane, b_plane, margin=0):
a_poly = a_obj.data.polygons[a_poly_index]
b_poly_index = b_plane[1]
b_poly = b_obj.data.polygons[b_poly_index]
- plane_point_a = iu.global_vertex_coordinates(
+ plane_point_a = butil.global_vertex_coordinates(
a_obj, a_obj.data.vertices[a_poly.vertices[0]]
)
- plane_normal_a = iu.global_polygon_normal(a_obj, a_poly)
- plane_point_b = iu.global_vertex_coordinates(
+ plane_normal_a = butil.global_polygon_normal(a_obj, a_poly)
+ plane_point_b = butil.global_vertex_coordinates(
b_obj, b_obj.data.vertices[b_poly.vertices[0]]
)
- plane_normal_b = iu.global_polygon_normal(b_obj, b_poly)
+ plane_normal_b = butil.global_polygon_normal(b_obj, b_poly, rev_normal)
plane_normal_b = -plane_normal_b
norm_mag_a = np.linalg.norm(plane_normal_a)
@@ -220,10 +269,10 @@ def snap_against(scene, a, b, a_plane, b_plane, margin=0):
a_obj = bpy.data.objects[a]
a_poly = a_obj.data.polygons[a_poly_index]
# Recalculate vertex_a and normal_a after rotation
- plane_point_a = iu.global_vertex_coordinates(
+ plane_point_a = butil.global_vertex_coordinates(
a_obj, a_obj.data.vertices[a_poly.vertices[0]]
)
- plane_normal_a = iu.global_polygon_normal(a_obj, a_poly)
+ plane_normal_a = butil.global_polygon_normal(a_obj, a_poly)
distance = (plane_point_a - plane_point_b).dot(plane_normal_b)
diff --git a/infinigen/core/constraints/example_solver/geometry/validity.py b/infinigen/core/constraints/example_solver/geometry/validity.py
index a38f8697c..e348fcc21 100644
--- a/infinigen/core/constraints/example_solver/geometry/validity.py
+++ b/infinigen/core/constraints/example_solver/geometry/validity.py
@@ -19,7 +19,10 @@
any_touching,
constrain_contact,
)
-from infinigen.core.constraints.example_solver.geometry.stability import stable_against
+from infinigen.core.constraints.example_solver.geometry.stability import (
+ coplanar,
+ stable_against,
+)
from infinigen.core.constraints.example_solver.state_def import State
from infinigen.core.util import blender as butil
@@ -76,6 +79,14 @@ def all_relations_valid(state, name):
f"{name} failed relation {i=}/{len(rels)} {relation_state.relation} on {relation_state.target_name}"
)
return False
+
+ case cl.CoPlanar(_child_tags, _parent_tags, _margin):
+ res = coplanar(state, name, relation_state)
+ if not res:
+ logger.debug(
+ f"{name} failed relation {i=}/{len(rels)} {relation_state.relation} on {relation_state.target_name}"
+ )
+ return False
case _:
raise TypeError(f"Unhandled {relation_state.relation}")
diff --git a/infinigen/core/constraints/example_solver/room/decorate.py b/infinigen/core/constraints/example_solver/room/decorate.py
index b77a826a3..909c04a44 100644
--- a/infinigen/core/constraints/example_solver/room/decorate.py
+++ b/infinigen/core/constraints/example_solver/room/decorate.py
@@ -184,34 +184,6 @@ def import_material(factory_name):
(2, "plaster"),
(1, "half"),
),
- t.Semantics.Office: (
- "weighted_choice",
- (2, "none"),
- (2, "art"),
- (2, "plaster"),
- (1, "half"),
- ),
- t.Semantics.OpenOffice: (
- "weighted_choice",
- (2, "none"),
- (2, "art"),
- (2, "plaster"),
- (1, "half"),
- ),
- t.Semantics.FactoryOffice: (
- "weighted_choice",
- (2, "none"),
- (2, "art"),
- (2, "plaster"),
- (1, "half"),
- ),
- t.Semantics.BreakRoom: (
- "weighted_choice",
- (2, "none"),
- (2, "art"),
- (2, "plaster"),
- (1, "half"),
- ),
}
room_wall_alternative_fns = defaultdict(
lambda: ("weighted_choice", (2, "none"), (0.5, "half")), room_wall_alternative_fns
diff --git a/infinigen/core/execute_tasks.py b/infinigen/core/execute_tasks.py
index ba1755f61..e741d017e 100644
--- a/infinigen/core/execute_tasks.py
+++ b/infinigen/core/execute_tasks.py
@@ -308,7 +308,10 @@ def execute_tasks(
col.hide_viewport = False
if need_terrain_processing and (
- Task.Render in task or Task.GroundTruth in task or Task.MeshSave in task or Task.Export in task
+ Task.Render in task
+ or Task.GroundTruth in task
+ or Task.MeshSave in task
+ or Task.Export in task
):
terrain = Terrain(
scene_seed,
diff --git a/infinigen/core/tagging.py b/infinigen/core/tagging.py
index 37079f9f3..c660869ef 100644
--- a/infinigen/core/tagging.py
+++ b/infinigen/core/tagging.py
@@ -463,12 +463,6 @@ def tag_support_surfaces(obj, angle_threshold=0.1):
angle_threshold (float): The cosine of the maximum angle deviation from +z to be considered a support surface.
"""
- def global_polygon_normal(obj, polygon):
- loc, rot, scale = obj.matrix_world.decompose()
- rot = rot.to_matrix()
- normal = rot @ polygon.normal
- return normal / np.linalg.norm(normal)
-
def process_mesh(mesh_obj):
up_vector = Vector((0, 0, 1))
@@ -476,7 +470,7 @@ def process_mesh(mesh_obj):
support_mask = np.zeros(n_poly, dtype=bool)
for poly in mesh_obj.data.polygons:
- global_normal = global_polygon_normal(mesh_obj, poly)
+ global_normal = butil.global_polygon_normal(mesh_obj, poly)
if global_normal.dot(up_vector) > 1 - angle_threshold:
support_mask[poly.index] = True
diff --git a/infinigen/core/util/blender.py b/infinigen/core/util/blender.py
index 41b9ad4e4..20912b2f5 100644
--- a/infinigen/core/util/blender.py
+++ b/infinigen/core/util/blender.py
@@ -17,6 +17,7 @@
import mathutils
import numpy as np
import trimesh
+from mathutils import Vector
from tqdm import tqdm
from infinigen.core.nodes.node_info import DATATYPE_DIMS, DATATYPE_FIELDS
@@ -1011,3 +1012,16 @@ def purge_empty_materials(obj):
continue
bpy.context.object.active_material_index = i
bpy.ops.object.material_slot_remove()
+
+
+def global_polygon_normal(obj, polygon, rev_normal=False):
+ loc, rot, scale = obj.matrix_world.decompose()
+ rot = rot.to_matrix()
+ normal = rot @ polygon.normal
+ if rev_normal:
+ normal = -normal
+ return normal / np.linalg.norm(normal)
+
+
+def global_vertex_coordinates(obj, local_vertex) -> Vector:
+ return obj.matrix_world @ local_vertex.co
diff --git a/infinigen/core/util/logging.py b/infinigen/core/util/logging.py
index a0deaae66..77052cc9d 100644
--- a/infinigen/core/util/logging.py
+++ b/infinigen/core/util/logging.py
@@ -94,7 +94,7 @@ def save_polycounts(file):
)
file.write(f"{col.name}: {polycount:,}\n")
for stat in bpy.context.scene.statistics(bpy.context.view_layer).split(" | ")[2:]:
- file.write(stat)
+ file.write(stat + "\n")
@gin.configurable
diff --git a/infinigen/datagen/manage_jobs.py b/infinigen/datagen/manage_jobs.py
index f159e9ffe..407ea0307 100644
--- a/infinigen/datagen/manage_jobs.py
+++ b/infinigen/datagen/manage_jobs.py
@@ -108,6 +108,7 @@ def slurm_submit_cmd(
gpus=0,
hours=1,
slurm_account=None,
+ slurm_nodelist=None,
slurm_partition=None,
slurm_exclude: list = None,
slurm_niceness=None,
@@ -129,6 +130,7 @@ def slurm_submit_cmd(
if gpus > 0:
executor.update_parameters(gpus_per_node=gpus)
+
if slurm_account is not None:
if slurm_account == f"ENVVAR_{PARTITION_ENVVAR}":
slurm_account = os.environ.get(PARTITION_ENVVAR)
@@ -150,6 +152,9 @@ def slurm_submit_cmd(
if slurm_partition is not None:
slurm_additional_params["partition"] = slurm_partition
+ if slurm_nodelist is not None:
+ slurm_additional_params["nodelist"] = slurm_nodelist
+
executor.update_parameters(slurm_additional_parameters=slurm_additional_params)
while True:
diff --git a/infinigen/datagen/monitor_tasks.py b/infinigen/datagen/monitor_tasks.py
index 7dac00e1e..8053130b5 100644
--- a/infinigen/datagen/monitor_tasks.py
+++ b/infinigen/datagen/monitor_tasks.py
@@ -150,14 +150,13 @@ def iterate_scene_tasks(
# even if we wouldnt launch them now (due to crashes etc)
monitor_all,
# provided by gin
- global_tasks,
- view_dependent_tasks,
- camera_dependent_tasks,
-
- frame_range,
- cam_id_ranges,
- point_trajectory_src_frame=1,
- num_resamples=1,
+ global_tasks,
+ view_dependent_tasks,
+ camera_dependent_tasks,
+ frame_range,
+ cam_id_ranges,
+ point_trajectory_src_frame=1,
+ num_resamples=1,
render_frame_range=None,
finalize_tasks=[],
view_block_size=1, # how many frames should share each `view_dependent_task`
@@ -262,10 +261,10 @@ def iterate_scene_tasks(
min(view_frame_range[1], cam_frame + cam_block_size - 1),
] # blender frame_end is INCLUSIVE
cam_overrides = [
- f'execute_tasks.frame_range=[{cam_frame_range[0]},{cam_frame_range[1]}]',
- f'execute_tasks.camera_id=[{cam_rig},{subcam}]',
- f'execute_tasks.resample_idx={resample_idx}',
- f'execute_tasks.point_trajectory_src_frame={point_trajectory_src_frame}',
+ f"execute_tasks.frame_range=[{cam_frame_range[0]},{cam_frame_range[1]}]",
+ f"execute_tasks.camera_id=[{cam_rig},{subcam}]",
+ f"execute_tasks.resample_idx={resample_idx}",
+ f"execute_tasks.point_trajectory_src_frame={point_trajectory_src_frame}",
]
camdep_indices = dict(
diff --git a/infinigen/datagen/util/template.html b/infinigen/datagen/util/template.html
index 83c79562d..9979de135 100644
--- a/infinigen/datagen/util/template.html
+++ b/infinigen/datagen/util/template.html
@@ -18,24 +18,24 @@
{{seed}}
diff --git a/infinigen/terrain/core.py b/infinigen/terrain/core.py
index b6ba877cd..a08dc10a4 100644
--- a/infinigen/terrain/core.py
+++ b/infinigen/terrain/core.py
@@ -222,7 +222,7 @@ def export(
if mesher_backend == "SphericalMesher":
mesher = OpaqueSphericalMesher(cameras, self.bounds)
elif mesher_backend == "OcMesher":
- mesher = OcMesher(cameras, self.bounds)
+ mesher = OcMesher(cameras, self.bounds)
elif mesher_backend == "UniformMesher":
mesher = UniformMesher(self.populated_bounds)
else:
@@ -419,7 +419,9 @@ def coarse_terrain(self):
self.surfaces_into_sdf()
# do second time to avoid surface application difference resulting in floating rocks
- coarse_meshes, _ = self.export(main_terrain_only=True, mesher_backend="UniformMesher")
+ coarse_meshes, _ = self.export(
+ main_terrain_only=True, mesher_backend="UniformMesher"
+ )
main_mesh = coarse_meshes[self.main_terrain]
# WaterCovered annotation
@@ -447,7 +449,13 @@ def coarse_terrain(self):
return main_obj
@gin.configurable
- def fine_terrain(self, output_folder, cameras, optimize_terrain_diskusage=True, mesher_backend="SphericalMesher"):
+ def fine_terrain(
+ self,
+ output_folder,
+ cameras,
+ optimize_terrain_diskusage=True,
+ mesher_backend="SphericalMesher",
+ ):
# redo sampling to achieve attribute -> surface correspondance
self.sample_surface_templates()
if (self.on_the_fly_asset_folder / Assets.Ocean).exists():
diff --git a/infinigen/terrain/mesher/spherical_mesher.py b/infinigen/terrain/mesher/spherical_mesher.py
index 2e47b1ceb..5dc15f20f 100644
--- a/infinigen/terrain/mesher/spherical_mesher.py
+++ b/infinigen/terrain/mesher/spherical_mesher.py
@@ -48,7 +48,7 @@ def __init__(
cams = full_info[0]
assert (
self.fov[0] < np.pi / 2 and self.fov[1] < np.pi / 2
- ), "`mesher_backend=SphericalMesher` does not support larger-than-90-degree fov yet. Please add `fine_terrain.mesher_backend = \"OcMesher\"` to your gin config."
+ ), '`mesher_backend=SphericalMesher` does not support larger-than-90-degree fov yet. Please add `fine_terrain.mesher_backend = "OcMesher"` to your gin config.'
self.r_min = r_min
self.complete_depth_test = complete_depth_test
self.bounds = bounds
diff --git a/infinigen/tools/export.py b/infinigen/tools/export.py
index d865d4e05..654eb863f 100644
--- a/infinigen/tools/export.py
+++ b/infinigen/tools/export.py
@@ -13,6 +13,8 @@
import bpy
import gin
+from infinigen.core.util import blender as butil
+
FORMAT_CHOICES = ["fbx", "obj", "usdc", "usda", "stl", "ply"]
BAKE_TYPES = {
"DIFFUSE": "Base Color",
@@ -104,6 +106,8 @@ def handle_geo_modifiers(obj, export_usd):
def split_glass_mats():
split_objs = []
for obj in bpy.data.objects:
+ if obj.hide_render or obj.hide_viewport:
+ continue
if any(
exclude in obj.name
for exclude in ["BowlFactory", "CupFactory", "OvenFactory", "BottleFactory"]
@@ -251,6 +255,7 @@ def update_visibility():
obj_view[obj] = obj.hide_render
obj.hide_viewport = True
obj.hide_render = True
+ obj.hide_set(0)
return collection_view, obj_view
@@ -647,28 +652,23 @@ def bake_object(obj, dest, img_size, export_usd):
return
bpy.ops.object.select_all(action="DESELECT")
- obj.select_set(True)
-
- for slot in obj.material_slots:
- mat = slot.material
- if mat is not None:
- slot.material = (
- mat.copy()
- ) # we duplicate in the case of distinct meshes sharing materials
-
- process_glass_materials(obj, export_usd)
- bake_metal(obj, dest, img_size, export_usd)
- bake_normals(obj, dest, img_size, export_usd)
-
- paramDict = process_interfering_params(obj)
-
- for bake_type in BAKE_TYPES:
- bake_pass(obj, dest, img_size, bake_type, export_usd)
+ with butil.SelectObjects(obj):
+ for slot in obj.material_slots:
+ mat = slot.material
+ if mat is not None:
+ slot.material = (
+ mat.copy()
+ ) # we duplicate in the case of distinct meshes sharing materials
- apply_baked_tex(obj, paramDict)
+ process_glass_materials(obj, export_usd)
+ bake_metal(obj, dest, img_size, export_usd)
+ bake_normals(obj, dest, img_size, export_usd)
+ paramDict = process_interfering_params(obj)
+ for bake_type in BAKE_TYPES:
+ bake_pass(obj, dest, img_size, bake_type, export_usd)
- obj.select_set(False)
+ apply_baked_tex(obj, paramDict)
def bake_scene(folderPath: Path, image_res, vertex_colors, export_usd):
@@ -989,6 +989,8 @@ def main(args):
print(f"Skipping non-blend file {blendfile}")
continue
+ bpy.ops.wm.open_mainfile(filepath=str(blendfile))
+
folder = export_scene(
blendfile,
args.output_folder,
diff --git a/infinigen/tools/results/visualize_traj.py b/infinigen/tools/results/visualize_traj.py
index fe8d18e55..b5c09dad9 100644
--- a/infinigen/tools/results/visualize_traj.py
+++ b/infinigen/tools/results/visualize_traj.py
@@ -39,7 +39,7 @@
def gif_and_tile(ims, just_gif=False):
- S = len(ims)
+ S = len(ims)
# each im is B x H x W x C
# i want a gif in the left, and the tiled frames on the right
# for the gif tool, this means making a B x S x H x W tensor
@@ -54,11 +54,13 @@ def gif_and_tile(ims, just_gif=False):
COLORMAP_FILE = "bremm.png"
+
+
class ColorMap2d:
def __init__(self, filename=None):
self._colormap_file = filename or COLORMAP_FILE
self._img = plt.imread(self._colormap_file)
-
+
self._height = self._img.shape[0]
self._width = self._img.shape[1]
@@ -67,82 +69,128 @@ def __call__(self, X):
output = np.zeros((X.shape[0], 3))
for i in range(X.shape[0]):
x, y = X[i, :]
- xp = int((self._width-1) * x)
- yp = int((self._height-1) * y)
- xp = np.clip(xp, 0, self._width-1)
- yp = np.clip(yp, 0, self._height-1)
+ xp = int((self._width - 1) * x)
+ yp = int((self._height - 1) * y)
+ xp = np.clip(xp, 0, self._width - 1)
+ yp = np.clip(yp, 0, self._height - 1)
output[i, :] = self._img[yp, xp]
return output
-
+
class Summ_writer(object):
- def __init__(self, writer=None, global_step=0, log_freq=10, fps=8, scalar_freq=100, just_gif=False):
+ def __init__(
+ self,
+ writer=None,
+ global_step=0,
+ log_freq=10,
+ fps=8,
+ scalar_freq=100,
+ just_gif=False,
+ ):
self.writer = writer
self.global_step = global_step
self.log_freq = log_freq
self.fps = fps
self.just_gif = just_gif
self.maxwidth = 10000
- self.save_this = (self.global_step % self.log_freq == 0)
- self.scalar_freq = max(scalar_freq,1)
-
-
- def summ_boxlist2d(self, name, rgb, boxlist, scores=None, tids=None, frame_id=None, only_return=False, linewidth=2):
+ self.save_this = self.global_step % self.log_freq == 0
+ self.scalar_freq = max(scalar_freq, 1)
+
+ def summ_boxlist2d(
+ self,
+ name,
+ rgb,
+ boxlist,
+ scores=None,
+ tids=None,
+ frame_id=None,
+ only_return=False,
+ linewidth=2,
+ ):
B, C, H, W = list(rgb.shape)
- boxlist_vis = self.draw_boxlist2d_on_image(rgb, boxlist, scores=scores, tids=tids, linewidth=linewidth)
- return self.summ_rgb(name, boxlist_vis, frame_id=frame_id, only_return=only_return)
-
- def summ_rgbs(self, name, ims, frame_ids=None, blacken_zeros=False, only_return=False):
+ boxlist_vis = self.draw_boxlist2d_on_image(
+ rgb, boxlist, scores=scores, tids=tids, linewidth=linewidth
+ )
+ return self.summ_rgb(
+ name, boxlist_vis, frame_id=frame_id, only_return=only_return
+ )
+
+ def summ_rgbs(
+ self, name, ims, frame_ids=None, blacken_zeros=False, only_return=False
+ ):
if self.save_this:
ims = gif_and_tile(ims, just_gif=self.just_gif)
vis = ims
- assert vis.dtype in {torch.uint8,torch.float32}
+ assert vis.dtype in {torch.uint8, torch.float32}
B, S, C, H, W = list(vis.shape)
if int(W) > self.maxwidth:
- vis = vis[:,:,:,:self.maxwidth]
+ vis = vis[:, :, :, : self.maxwidth]
if only_return:
return vis
else:
pass
- def draw_traj_on_image_py(self, rgb, traj, S=50, linewidth=1, show_dots=False, show_lines=True, cmap='coolwarm', val=None, maxdist=None):
+ def draw_traj_on_image_py(
+ self,
+ rgb,
+ traj,
+ S=50,
+ linewidth=1,
+ show_dots=False,
+ show_lines=True,
+ cmap="coolwarm",
+ val=None,
+ maxdist=None,
+ ):
# all inputs are numpy tensors
# rgb is 3 x H x W
# traj is S x 2
-
+
H, W, C = rgb.shape
- assert(C==3)
+ assert C == 3
rgb = rgb.astype(np.uint8).copy()
S1, D = traj.shape
- assert(D==2)
+ assert D == 2
color_map = cm.get_cmap(cmap)
S1, D = traj.shape
for s in range(S1):
if val is not None:
- color = np.array(color_map(val[s])[:3]) * 255 # rgb
+ color = np.array(color_map(val[s])[:3]) * 255 # rgb
else:
if maxdist is not None:
- val = (np.sqrt(np.sum((traj[s]-traj[0])**2))/maxdist).clip(0,1)
- color = np.array(color_map(val)[:3]) * 255 # rgb
+ val = (np.sqrt(np.sum((traj[s] - traj[0]) ** 2)) / maxdist).clip(
+ 0, 1
+ )
+ color = np.array(color_map(val)[:3]) * 255 # rgb
else:
- color = np.array(color_map((s)/max(1,float(S-2)))[:3]) * 255 # rgb
-
- if show_lines and s<(S1-1):
- cv2.line(rgb,
- (int(traj[s,0]), int(traj[s,1])),
- (int(traj[s+1,0]), int(traj[s+1,1])),
- color,
- linewidth,
- cv2.LINE_AA)
+ color = (
+ np.array(color_map((s) / max(1, float(S - 2)))[:3]) * 255
+ ) # rgb
+
+ if show_lines and s < (S1 - 1):
+ cv2.line(
+ rgb,
+ (int(traj[s, 0]), int(traj[s, 1])),
+ (int(traj[s + 1, 0]), int(traj[s + 1, 1])),
+ color,
+ linewidth,
+ cv2.LINE_AA,
+ )
if show_dots:
- cv2.circle(rgb, (int(traj[s,0]), int(traj[s,1])), linewidth, np.array(color_map(1)[:3])*255, -1)
+ cv2.circle(
+ rgb,
+ (int(traj[s, 0]), int(traj[s, 1])),
+ linewidth,
+ np.array(color_map(1)[:3]) * 255,
+ -1,
+ )
# if maxdist is not None:
# val = (np.sqrt(np.sum((traj[-1]-traj[0])**2))/maxdist).clip(0,1)
@@ -156,52 +204,63 @@ def draw_traj_on_image_py(self, rgb, traj, S=50, linewidth=1, show_dots=False, s
return rgb
-
- def summ_traj2ds_on_rgbs(self, name, trajs, rgbs, valids=None, frame_ids=None, only_return=False, show_dots=False, cmap='coolwarm', vals=None, linewidth=1):
+ def summ_traj2ds_on_rgbs(
+ self,
+ name,
+ trajs,
+ rgbs,
+ valids=None,
+ frame_ids=None,
+ only_return=False,
+ show_dots=False,
+ cmap="coolwarm",
+ vals=None,
+ linewidth=1,
+ ):
# trajs is B, S, N, 2
# rgbs is B, S, C, H, W
B, S, C, H, W = rgbs.shape
B, S2, N, D = trajs.shape
- assert(S==S2)
+ assert S == S2
- rgbs = rgbs[0] # S, C, H, W
- trajs = trajs[0] # S, N, 2
+ rgbs = rgbs[0] # S, C, H, W
+ trajs = trajs[0] # S, N, 2
if valids is None:
- valids = torch.ones_like(trajs[:,:,0]) # S, N
+ valids = torch.ones_like(trajs[:, :, 0]) # S, N
else:
valids = valids[0]
# print('trajs', trajs.shape)
# print('valids', valids.shape)
-
+
if vals is not None:
- vals = vals[0] # N
+ vals = vals[0] # N
# print('vals', vals.shape)
rgbs_color = []
for rgb in rgbs:
- rgb = rgb.numpy()
- rgb = np.transpose(rgb, [1, 2, 0]) # put channels last
- rgbs_color.append(rgb) # each element 3 x H x W
+ rgb = rgb.numpy()
+ rgb = np.transpose(rgb, [1, 2, 0]) # put channels last
+ rgbs_color.append(rgb) # each element 3 x H x W
for i in range(N):
- if cmap=='onediff' and i==0:
- cmap_ = 'spring'
- elif cmap=='onediff':
- cmap_ = 'winter'
+ if cmap == "onediff" and i == 0:
+ cmap_ = "spring"
+ elif cmap == "onediff":
+ cmap_ = "winter"
else:
cmap_ = cmap
- traj = trajs[:,i].long().detach().cpu().numpy() # S, 2
- valid = valids[:,i].long().detach().cpu().numpy() # S
-
+ traj = trajs[:, i].long().detach().cpu().numpy() # S, 2
+ valid = valids[:, i].long().detach().cpu().numpy() # S
+
# print('traj', traj.shape)
# print('valid', valid.shape)
for t in range(S):
if valid[t]:
# traj_seq = traj[max(t-16,0):t+1]
- traj_seq = traj[max(t-8,0):t+1]
- val_seq = np.linspace(0,1,len(traj_seq))
+ traj_seq = traj[max(t - 8, 0) : t + 1]
+ val_seq = np.linspace(0, 1, len(traj_seq))
# if t<2:
# val_seq = np.zeros_like(val_seq)
# print('val_seq', val_seq)
@@ -209,21 +268,37 @@ def summ_traj2ds_on_rgbs(self, name, trajs, rgbs, valids=None, frame_ids=None, o
# val_seq = np.arange(8)/8.0
# val_seq = val_seq[-len(traj_seq):]
# rgbs_color[t] = self.draw_traj_on_image_py(rgbs_color[t], traj_seq, S=S, show_dots=show_dots, cmap=cmap_, val=val_seq, linewidth=linewidth)
- rgbs_color[t] = self.draw_traj_on_image_py(rgbs_color[t], traj_seq, S=S, show_dots=show_dots, cmap=cmap_, val=val_seq, linewidth=linewidth)
+ rgbs_color[t] = self.draw_traj_on_image_py(
+ rgbs_color[t],
+ traj_seq,
+ S=S,
+ show_dots=show_dots,
+ cmap=cmap_,
+ val=val_seq,
+ linewidth=linewidth,
+ )
# input()
for i in range(N):
- if cmap=='onediff' and i==0:
- cmap_ = 'spring'
- elif cmap=='onediff':
- cmap_ = 'winter'
+ if cmap == "onediff" and i == 0:
+ cmap_ = "spring"
+ elif cmap == "onediff":
+ cmap_ = "winter"
else:
cmap_ = cmap
- traj = trajs[:,i] # S,2
+ traj = trajs[:, i] # S,2
# vis = visibles[:,i] # S
- vis = torch.ones_like(traj[:,0]) # S
- valid = valids[:,i] # S
- rgbs_color = self.draw_circ_on_images_py(rgbs_color, traj, vis, S=0, show_dots=show_dots, cmap=cmap_, linewidth=linewidth)
+ vis = torch.ones_like(traj[:, 0]) # S
+ valid = valids[:, i] # S
+ rgbs_color = self.draw_circ_on_images_py(
+ rgbs_color,
+ traj,
+ vis,
+ S=0,
+ show_dots=show_dots,
+ cmap=cmap_,
+ linewidth=linewidth,
+ )
rgbs = []
for rgb in rgbs_color:
@@ -232,78 +307,96 @@ def summ_traj2ds_on_rgbs(self, name, trajs, rgbs, valids=None, frame_ids=None, o
return self.summ_rgbs(name, rgbs, only_return=only_return, frame_ids=frame_ids)
-
- def draw_traj_on_images_py(self, rgbs, traj, S=50, linewidth=1, show_dots=False, cmap='coolwarm', maxdist=None):
+ def draw_traj_on_images_py(
+ self,
+ rgbs,
+ traj,
+ S=50,
+ linewidth=1,
+ show_dots=False,
+ cmap="coolwarm",
+ maxdist=None,
+ ):
# all inputs are numpy tensors
# rgbs is a list of H,W,3
# traj is S,2
H, W, C = rgbs[0].shape
- assert(C==3)
+ assert C == 3
rgbs = [rgb.astype(np.uint8).copy() for rgb in rgbs]
S1, D = traj.shape
- assert(D==2)
-
- x = int(np.clip(traj[0,0], 0, W-1))
- y = int(np.clip(traj[0,1], 0, H-1))
- color = rgbs[0][y,x]
- color = (int(color[0]),int(color[1]),int(color[2]))
+ assert D == 2
+
+ x = int(np.clip(traj[0, 0], 0, W - 1))
+ y = int(np.clip(traj[0, 1], 0, H - 1))
+ color = rgbs[0][y, x]
+ color = (int(color[0]), int(color[1]), int(color[2]))
for s in range(S):
# bak_color = np.array(color_map(1.0)[:3]) * 255 # rgb
# cv2.circle(rgbs[s], (traj[s,0], traj[s,1]), linewidth*4, bak_color, -1)
- cv2.polylines(rgbs[s],
- [traj[:s+1]],
- False,
- color,
- linewidth,
- cv2.LINE_AA)
+ cv2.polylines(
+ rgbs[s], [traj[: s + 1]], False, color, linewidth, cv2.LINE_AA
+ )
return rgbs
-
- def draw_circ_on_images_py(self, rgbs, traj, vis, S=50, linewidth=1, show_dots=False, cmap=None, maxdist=None):
+ def draw_circ_on_images_py(
+ self,
+ rgbs,
+ traj,
+ vis,
+ S=50,
+ linewidth=1,
+ show_dots=False,
+ cmap=None,
+ maxdist=None,
+ ):
# all inputs are numpy tensors
# rgbs is a list of 3,H,W
# traj is S,2
H, W, C = rgbs[0].shape
- assert(C==3)
+ assert C == 3
rgbs = [rgb.astype(np.uint8).copy() for rgb in rgbs]
S1, D = traj.shape
- assert(D==2)
+ assert D == 2
if cmap is None:
bremm = ColorMap2d()
traj_ = traj[0:1].astype(np.float32)
- traj_[:,0] /= float(W)
- traj_[:,1] /= float(H)
+ traj_[:, 0] /= float(W)
+ traj_[:, 1] /= float(H)
color = bremm(traj_)
# print('color', color)
- color = (color[0]*255).astype(np.uint8)
+ color = (color[0] * 255).astype(np.uint8)
# color = (int(color[0]),int(color[1]),int(color[2]))
- color = (int(color[2]),int(color[1]),int(color[0]))
-
+ color = (int(color[2]), int(color[1]), int(color[0]))
+
for s in range(S1):
if cmap is not None:
color_map = cm.get_cmap(cmap)
# color = np.array(color_map(s/(S-1))[:3]) * 255 # rgb
- color = np.array(color_map((s+1)/max(1,float(S-1)))[:3]) * 255 # rgb
+ color = (
+ np.array(color_map((s + 1) / max(1, float(S - 1)))[:3]) * 255
+ ) # rgb
# color = color.astype(np.uint8)
# color = (color[0], color[1], color[2])
# print('color', color)
# import ipdb; ipdb.set_trace()
-
- cv2.circle(rgbs[s], (int(traj[s,0]), int(traj[s,1])), linewidth+1, color, -1)
+
+ cv2.circle(
+ rgbs[s], (int(traj[s, 0]), int(traj[s, 1])), linewidth + 1, color, -1
+ )
# vis_color = int(np.squeeze(vis[s])*255)
# vis_color = (vis_color,vis_color,vis_color)
# cv2.circle(rgbs[s], (int(traj[s,0]), int(traj[s,1])), linewidth+1, vis_color, -1)
-
+
return rgbs
def visualize_folder(folder):
- assert(folder.name == "frames")
+ assert folder.name == "frames"
images = glob.glob(str(folder / "Image/*/*.png"))
lists = {}
@@ -322,36 +415,58 @@ def visualize_folder(folder):
for key in lists:
sub_lists = sorted(lists[key])
- rgbs = np.stack([cv2.imread(file).transpose((2, 0, 1)) for file in sub_lists])[:,::-1].copy()
+ rgbs = np.stack([cv2.imread(file).transpose((2, 0, 1)) for file in sub_lists])[
+ :, ::-1
+ ].copy()
S, C, H, W = rgbs.shape
# pick N points to track; we'll use a uniform grid
N = 1024
N_ = np.sqrt(N).round().astype(np.int32)
grid_y, grid_x = np.meshgrid(range(N_), range(N_), indexing="ij")
- grid_y = (8 + grid_y.reshape(1, -1)/float(N_-1) * (H-16)).astype(np.int32)
- grid_x = (8 + grid_x.reshape(1, -1)/float(N_-1) * (W-16)).astype(np.int32)
- xy0 = np.stack([grid_x, grid_y], axis=-1) # B, N_*N_, 2
- trajs_e = np.zeros((1, S, N_*N_, 2))
+ grid_y = (8 + grid_y.reshape(1, -1) / float(N_ - 1) * (H - 16)).astype(np.int32)
+ grid_x = (8 + grid_x.reshape(1, -1) / float(N_ - 1) * (W - 16)).astype(np.int32)
+ xy0 = np.stack([grid_x, grid_y], axis=-1) # B, N_*N_, 2
+ trajs_e = np.zeros((1, S, N_ * N_, 2))
for file in sub_lists:
file = Path(file)
frame = parse_suffix(file.name)["frame"]
- traj_path = folder / "PointTraj3D" / file.parent.name / ("PointTraj3D"+file.name[5:-4]+".npy")
+ traj_path = (
+ folder
+ / "PointTraj3D"
+ / file.parent.name
+ / ("PointTraj3D" + file.name[5:-4] + ".npy")
+ )
traj = np.load(traj_path)
- trajs_e[:, frame - frame_start] = xy0 + traj[grid_y * 2, grid_x * 2][..., :2]
+ trajs_e[:, frame - frame_start] = (
+ xy0 + traj[grid_y * 2, grid_x * 2][..., :2]
+ )
- trajs_e = trajs_e.transpose(0, 1, 3, 2) # 1, S, 2, N_*N_
- trajs_e = trajs_e.reshape(S*2, -1).transpose() # N_*N_, S*2
+ trajs_e = trajs_e.transpose(0, 1, 3, 2) # 1, S, 2, N_*N_
+ trajs_e = trajs_e.reshape(S * 2, -1).transpose() # N_*N_, S*2
mask = ~np.isnan(trajs_e).any(axis=1)
- trajs_e = trajs_e[mask].transpose().reshape(1, S, 2, -1) # 1, S, 2, K
+ trajs_e = trajs_e[mask].transpose().reshape(1, S, 2, -1) # 1, S, 2, K
trajs_e = trajs_e.transpose(0, 1, 3, 2)
summ = Summ_writer(just_gif=True)
- results = summ.summ_traj2ds_on_rgbs(None, torch.from_numpy(trajs_e), torch.from_numpy(rgbs).unsqueeze(0), only_return=True)
+ results = summ.summ_traj2ds_on_rgbs(
+ None,
+ torch.from_numpy(trajs_e),
+ torch.from_numpy(rgbs).unsqueeze(0),
+ only_return=True,
+ )
results = results.numpy()
frames = [results[0, i].transpose((1, 2, 0)) for i in range(results.shape[1])]
suffix = Path(sub_lists[0]).name[5:-4]
- imageio.mimsave(str(folder / "PointTraj3D" / Path(sub_lists[0]).parent.name / ("PointTrajVis" + suffix + ".gif")), frames)
+ imageio.mimsave(
+ str(
+ folder
+ / "PointTraj3D"
+ / Path(sub_lists[0]).parent.name
+ / ("PointTrajVis" + suffix + ".gif")
+ ),
+ frames,
+ )
if __name__ == "__main__":
diff --git a/infinigen_examples/constraints/home.py b/infinigen_examples/constraints/home.py
index f03f51038..e71402ef4 100644
--- a/infinigen_examples/constraints/home.py
+++ b/infinigen_examples/constraints/home.py
@@ -708,14 +708,18 @@ def vertical_diff(o, r):
deskchair = furniture[seating.OfficeChairFactory].related_to(
desks, cu.front_to_front
)
- monitors = obj[appliances.MonitorFactory]
+ desk_monitors = (
+ obj[appliances.MonitorFactory]
+ .related_to(desks, cu.ontop)
+ .related_to(desks, cu.back_coplanar_back)
+ )
constraints["desk"] = rooms.all(
lambda r: (
desks.related_to(r).all(
lambda t: (
deskchair.related_to(r).related_to(t).count().in_range(0, 1)
- * monitors.related_to(t, cu.ontop).count().equals(1)
+ * desk_monitors.related_to(t, cu.ontop).count().equals(1)
* (obj[Semantics.OfficeShelfItem].related_to(t, cu.on).count() >= 0)
* (deskchair.related_to(r).related_to(t).count() == 1)
)
@@ -730,15 +734,6 @@ def vertical_diff(o, r):
+ d.distance(doors.related_to(r)).maximize(weight=0.1)
+ cl.accessibility_cost(d, furniture.related_to(r)).minimize(weight=3)
+ cl.accessibility_cost(d, r).minimize(weight=3)
- + monitors.related_to(d).mean(
- lambda m: (
- cl.accessibility_cost(m, r, dist=2).minimize(weight=3)
- + cl.accessibility_cost(
- m, obj.related_to(r), dist=0.5
- ).minimize(weight=3)
- + m.distance(r, cu.walltags).hinge(0.1, 1e7).minimize(weight=1)
- )
- )
+ deskchair.distance(rooms, cu.walltags).maximize(weight=1)
)
)
@@ -956,49 +951,16 @@ def vertical_diff(o, r):
cu.bottom, {Subpart.SupportSurface}, margin=0.001
)
cl.StableAgainst(cu.back, cu.walltags, margin=0.1)
- kitchen_sink = obj[Semantics.Sink][table_decorations.SinkFactory].related_to(
- countertops, sink_flush_on_counter
+ kitchen_sink = (
+ obj[Semantics.Sink][table_decorations.SinkFactory]
+ .related_to(countertops, sink_flush_on_counter)
+ .related_to(countertops, cu.front_coplanar_front)
)
+
constraints["kitchen_sink"] = kitchens.all(
lambda r: (
- # those sinks can be on either type of counter
kitchen_sink.related_to(wallcounter.related_to(r)).count().in_range(0, 1)
- * kitchen_sink.related_to(island.related_to(r))
- .count()
- .in_range(0, 1) # island sinks dont need to be against wall
- * countertops.related_to(r).all(
- lambda c: (
- kitchen_sink.related_to(c).all(
- lambda s: s.distance(c, cu.side).in_range(0.05, 0.2)
- )
- )
- )
- )
- )
-
- score_terms["kitchen_sink"] = kitchens.mean(
- lambda r: (
- countertops.mean(
- lambda c: kitchen_sink.related_to(c).mean(
- lambda s: (
- (s.volume(dims=2) / c.volume(dims=2))
- .hinge(0.2, 0.4)
- .minimize(weight=10)
- )
- )
- )
- + island.related_to(r).mean(
- lambda isl: ( # sinks on islands must be near to edge and oriented outwards
- kitchen_sink.related_to(isl).mean(
- lambda s: (
- cl.angle_alignment_cost(s, isl, cu.side).minimize(weight=10)
- + cl.distance(s, isl, cu.side)
- .hinge(0.05, 0.07)
- .minimize(weight=10)
- )
- )
- )
- )
+ * kitchen_sink.related_to(island.related_to(r)).count().in_range(0, 1)
)
)
@@ -1006,8 +968,10 @@ def vertical_diff(o, r):
kitchen_appliances_big = kitchen_appliances.related_to(
kitchens, cu.on_floor
).related_to(kitchens, cu.against_wall)
- microwaves = kitchen_appliances[appliances.MicrowaveFactory].related_to(
- wallcounter, cu.on
+ microwaves = (
+ kitchen_appliances[appliances.MicrowaveFactory]
+ .related_to(wallcounter, cu.on)
+ .related_to(wallcounter, cu.back_coplanar_back)
)
constraints["kitchen_appliance"] = kitchens.all(
@@ -1200,7 +1164,11 @@ def freestanding(o, r):
)
)
- tvs = obj[appliances.TVFactory].related_to(tvstands, cu.ontop)
+ tvs = (
+ obj[appliances.TVFactory]
+ .related_to(tvstands, cu.ontop)
+ .related_to(tvstands, cu.back_coplanar_back)
+ )
if params["has_tv"]:
constraints["tv"] = livingrooms.all(
diff --git a/infinigen_examples/constraints/util.py b/infinigen_examples/constraints/util.py
index 7d27df08d..d7202c63a 100644
--- a/infinigen_examples/constraints/util.py
+++ b/infinigen_examples/constraints/util.py
@@ -69,6 +69,9 @@
hanging = cl.StableAgainst(top, ceilingtags, margin=0.05)
side_against_wall = cl.StableAgainst(side, walltags, margin=0.05)
+front_coplanar_front = cl.CoPlanar(front, front, margin=0.05, rev_normal=True)
+back_coplanar_back = cl.CoPlanar(back, back, margin=0.05, rev_normal=True)
+
ontop = cl.StableAgainst(bottom, top)
on = cl.StableAgainst(bottom, {t.Subpart.SupportSurface})
diff --git a/infinigen_examples/generate_individual_assets.py b/infinigen_examples/generate_individual_assets.py
index 7c9684d02..bcc7b810d 100644
--- a/infinigen_examples/generate_individual_assets.py
+++ b/infinigen_examples/generate_individual_assets.py
@@ -13,7 +13,6 @@
import logging
import math
import os
-import random
import re
import subprocess
import traceback
@@ -45,15 +44,16 @@
# from infinigen.core.rendering.render import enable_gpu
from infinigen.assets.utils.decorate import read_base_co, read_co
-from infinigen.assets.utils.misc import assign_material, subclasses
+from infinigen.assets.utils.misc import assign_material
from infinigen.core import init, surface
from infinigen.core.init import configure_cycles_devices
-from infinigen.core.placement import AssetFactory, density
+from infinigen.core.placement import density
from infinigen.core.tagging import tag_system
# noinspection PyUnresolvedReferences
from infinigen.core.util import blender as butil
from infinigen.core.util.camera import points_inview
+from infinigen.core.util.logging import save_polycounts
from infinigen.core.util.math import FixedSeed
from infinigen.core.util.test_utils import load_txt_list
from infinigen.tools import export
@@ -210,11 +210,9 @@ def build_scene_surface(args, factory_name, idx):
if args.dryrun:
return
- if hasattr(template, "make_sphere"):
- asset = template.make_sphere()
- else:
- bpy.ops.mesh.primitive_ico_sphere_add(radius=0.8, subdivisions=9)
- asset = bpy.context.active_object
+ bpy.ops.mesh.primitive_ico_sphere_add(radius=0.8, subdivisions=9)
+ asset = bpy.context.active_object
+
if type(template) is type:
template = template(idx)
template.apply(asset)
@@ -230,16 +228,20 @@ def build_and_save_asset(payload: dict):
args = payload["args"]
idx = payload["idx"]
+ output_folder = args.output_folder / f"{factory_name}_{idx:03d}"
+
+ if output_folder.exists() and args.skip_existing:
+ print(f"Skipping {output_folder}")
+ return
+
+ output_folder.mkdir(exist_ok=True)
+
logger.info(f"Building scene for {factory_name} {idx}")
if args.seed > 0:
idx = args.seed
- path = args.output_folder / factory_name
- if (path / f"images/image_{idx:03d}.png").exists() and args.skip_existing:
- print(f"Skipping {path}")
- return
- path.mkdir(exist_ok=True)
+ surface.registry.initialize_from_gin()
scene = bpy.context.scene
scene.render.engine = "CYCLES"
@@ -251,9 +253,9 @@ def build_and_save_asset(payload: dict):
if not args.fire:
bpy.context.scene.render.film_transparent = args.film_transparent
- bpy.context.scene.world.node_tree.nodes["Background"].inputs[0].default_value[
- -1
- ] = 0
+ bg = bpy.context.scene.world.node_tree.nodes["Background"]
+ bg.inputs[0].default_value[-1] = 0
+
camera, center = setup_camera(args)
if "Factory" in factory_name:
@@ -264,6 +266,9 @@ def build_and_save_asset(payload: dict):
if args.dryrun:
return
+ with (output_folder / "polycounts.txt").open("w") as f:
+ save_polycounts(f)
+
configure_cycles_devices()
with FixedSeed(args.lighting + idx):
@@ -300,28 +305,24 @@ def build_and_save_asset(payload: dict):
cam_info_ng.nodes["Object Info"].inputs["Object"].default_value = camera
if args.save_blend:
- (path / "scenes").mkdir(exist_ok=True)
- butil.save_blend(f"{path}/scenes/scene_{idx:03d}.blend", autopack=True)
- tag_system.save_tag(f"{path}/MaskTag.json")
+ butil.save_blend(output_folder / "scene.blend", autopack=True)
+ tag_system.save_tag(output_folder / "MaskTag.json")
if args.fire:
- bpy.data.worlds["World"].node_tree.nodes["Background.001"].inputs[
- 1
- ].default_value = 0.04
+ bg = bpy.data.worlds["World"].node_tree.nodes["Background.001"]
+ bg.inputs[1].default_value = 0.04
bpy.context.scene.view_settings.exposure = -2
if args.render == "image":
- (path / "images").mkdir(exist_ok=True)
- imgpath = path / f"images/image_{idx:03d}.png"
- scene.render.filepath = str(imgpath)
+ image_path = output_folder / "Image.png"
+ scene.render.filepath = str(image_path)
bpy.ops.render.render(write_still=True)
elif args.render == "video":
bpy.context.scene.frame_end = args.frame_end
- parent(asset).driver_add("rotation_euler")[
- -1
- ].driver.expression = f"frame/{args.frame_end / (2 * np.pi * args.cycles)}"
- (path / "frames" / f"scene_{idx:03d}").mkdir(parents=True, exist_ok=True)
- imgpath = path / f"frames/scene_{idx:03d}/frame_###.png"
+ driver = parent(asset).driver_add("rotation_euler")[-1]
+ driver.driver.expression = f"frame/{args.frame_end / (2 * np.pi * args.cycles)}"
+
+ imgpath = output_folder / "Image_###.png"
scene.render.filepath = str(imgpath)
bpy.ops.render.render(animation=True)
elif args.render == "none":
@@ -330,8 +331,7 @@ def build_and_save_asset(payload: dict):
raise ValueError(f"Unrecognized {args.render=}")
if args.export is not None:
- export_path = path / "export" / f"export_{idx:03d}"
- export_path.mkdir(exist_ok=True, parents=True)
+ export_path = args.output_folder / f"export_{idx:03d}"
export.export_curr_scene(
export_path, format=args.export, image_res=args.export_texture_res
)
@@ -360,48 +360,37 @@ def adjust_cam_distance(asset, camera, margin, percent=0.999):
camera.location[1] = -6
-def make_grid(args, path, n):
- files = []
- for filename in sorted(os.listdir(f"{path}/images")):
- if filename.endswith(".png"):
- files.append(f"{path}/images/{filename}")
- files = files[:n]
- if len(files) == 0:
- print("No images found")
- return
+def make_grid(args, name, files, n):
+ path = args.output_folder
+
with Image.open(files[0]) as i:
x, y = i.size
- for i, name in enumerate([path.stem, f"{path.stem}_"]):
- if args.zoom:
- img = Image.new("RGBA", (2 * x, y))
- sz = int(np.floor(np.sqrt(n - 0.9)))
- if i > 0:
- random.shuffle(files)
- with Image.open(files[0]) as i:
- img.paste(i, (0, 0))
- for idx in range(sz**2):
- with Image.open(files[min(idx + 1, len(files) - 1)]) as i:
- img.paste(
- i.resize((x // sz, y // sz)),
- (x + (idx % sz) * (x // sz), idx // sz * (y // sz)),
- )
- img.save(f"{path}/{name}.png")
- else:
- sz_x = list(
- sorted(
- range(1, n + 1),
- key=lambda x: abs(math.ceil(n / x) / x - args.best_ratio),
+
+ if args.zoom:
+ img = Image.new("RGBA", (2 * x, y))
+ sz = int(np.floor(np.sqrt(n - 0.9)))
+ with Image.open(files[0]) as i:
+ img.paste(i, (0, 0))
+ for idx in range(sz**2):
+ with Image.open(files[min(idx + 1, len(files) - 1)]) as i:
+ img.paste(
+ i.resize((x // sz, y // sz)),
+ (x + (idx % sz) * (x // sz), idx // sz * (y // sz)),
)
- )[0]
- sz_y = math.ceil(n / sz_x)
- img = Image.new("RGBA", (sz_x * x, sz_y * y))
- if i > 0:
- random.shuffle(files)
- for idx, file in enumerate(files):
- with Image.open(file) as i:
- img.paste(i, (idx % sz_x * x, idx // sz_x * y))
- img.save(f"{path}/{name}.png")
- print(f"{path}/{name}.png generated")
+ img.save(path / f"{name}.png")
+ else:
+ sz_x = list(
+ sorted(
+ range(1, n + 1),
+ key=lambda x: abs(math.ceil(n / x) / x - args.best_ratio),
+ )
+ )[0]
+ sz_y = math.ceil(n / sz_x)
+ img = Image.new("RGBA", (sz_x * x, sz_y * y))
+ for idx, file in enumerate(files):
+ with Image.open(file) as i:
+ img.paste(i, (idx % sz_x * x, idx // sz_x * y))
+ img.save(f"{path}/{name}.png")
def setup_camera(args):
@@ -426,7 +415,13 @@ def setup_camera(args):
return camera, camera.parent
-def mapfunc(f, its, args):
+@gin.configurable
+def mapfunc(
+ f,
+ its,
+ args,
+ slurm_nodelist=None,
+):
if args.n_workers == 1:
return [f(i) for i in its]
elif not args.slurm:
@@ -434,13 +429,21 @@ def mapfunc(f, its, args):
return list(p.imap(f, its))
else:
executor = submitit.AutoExecutor(folder=args.output_folder / "logs")
+
+ slurm_additional_parameters = {}
+
+ if slurm_nodelist is not None:
+ slurm_additional_parameters["nodelist"] = slurm_nodelist
+
executor.update_parameters(
name=args.output_folder.name,
timeout_min=60,
- cpus_per_task=2,
+ cpus_per_task=4,
mem_gb=8,
- slurm_partition=os.environ["INFINIGEN_SLURMPARTITION"],
+ gpus_per_node=1 if args.gpu else 0,
+ slurm_partition=os.environ.get("INFINIGEN_SLURMPARTITION"),
slurm_array_parallelism=args.n_workers,
+ slurm_additional_parameters=slurm_additional_parameters,
)
jobs = executor.map_array(f, its)
for j in jobs:
@@ -452,9 +455,10 @@ def main(args):
init.apply_gin_configs(
["infinigen_examples/configs_indoor", "infinigen_examples/configs_nature"],
+ configs=args.configs,
+ overrides=args.overrides,
skip_unknown=True,
)
- surface.registry.initialize_from_gin()
if args.debug is not None:
for name in logging.root.manager.loggerDict:
@@ -468,75 +472,45 @@ def main(args):
if args.gpu:
init.configure_render_cycles()
- if ".txt" in args.factories[0]:
- name = args.factories[0].split(".")[-2].split("/")[-1]
- else:
- name = "_".join(args.factories)
-
if args.output_folder is None:
- args.output_folder = Path(os.getcwd()) / "outputs"
+ outputs = Path("outputs")
+ assert outputs.exists(), outputs
+ name = "_".join(args.factories)
+        args.output_folder = outputs / name
- path = Path(args.output_folder) / name
- path.mkdir(exist_ok=True, parents=True)
+ args.output_folder.mkdir(exist_ok=True, parents=True)
factories = list(args.factories)
- if "ALL_ASSETS" in factories:
- factories += [f.__name__ for f in subclasses(AssetFactory)]
- factories.remove("ALL_ASSETS")
- logger.warning(
- "ALL_ASSETS is deprecated. Use `-f tests/assets/list_nature_meshes.txt` and `-f tests/assets/list_indoor_meshes.txt` instead."
- )
- if "ALL_SCATTERS" in factories:
- factories += [f.stem for f in Path("infinigen/assets/scatters").iterdir()]
- factories.remove("ALL_SCATTERS")
- if "ALL_MATERIALS" in factories:
- factories += [f.stem for f in Path("infinigen/assets/materials").iterdir()]
- factories.remove("ALL_MATERIALS")
- logger.warning(
- "ALL_MATERIALS is deprecated. Use `-f tests/assets/list_nature_materials.txt` and `-f tests/assets/list_indoor_materials.txt` instead."
- )
-
- has_txt = ".txt" in factories[0]
- if has_txt:
+ if len(factories) == 1 and factories[0].endswith(".txt"):
factories = [
- f.split(".")[-1] for f in load_txt_list(factories[0], skip_sharp=False)
+ f.split(".")[-1]
+ for f in load_txt_list(factories[0], skip_sharp=False)
]
+ else:
+ assert not any(f.endswith(".txt") for f in factories)
+
+ targets = [
+ {"args": args, "fac": fac, "idx": idx}
+ for idx in range(args.n_images)
+ for fac in factories
+ ]
if not args.postprocessing_only:
- for fac in factories:
- targets = [
- {"args": args, "fac": fac, "idx": idx} for idx in range(args.n_images)
- ]
- mapfunc(build_and_save_asset, targets, args)
+ mapfunc(build_and_save_asset, targets, args)
if args.dryrun:
return
- for j, fac in enumerate(factories):
- fac_path = args.output_folder / fac
- fac_path.mkdir(exist_ok=True, parents=True)
-
- f"{fac_path} does not exist"
- if has_txt:
- for i in range(args.n_images):
- img_path = fac_path / "images" / f"image_{i:03d}.png"
- if img_path.exists():
- subprocess.run(
- f"cp -f {img_path} {path}/{fac}_{i:03d}.png", shell=True
- )
- else:
- print(f"{img_path} does not exist")
- elif args.render == "image":
- make_grid(args, fac_path, args.n_images)
+ for fac in factories:
+ if args.render == "image":
+ files = list(args.output_folder.glob(f"{fac}_*/Image*.png"))
+ make_grid(args, "grid_" + fac, files, args.n_images)
elif args.render == "video":
- (fac_path / "videos").mkdir(exist_ok=True)
- for i in range(args.n_images):
- subprocess.run(
- f'ffmpeg -y -r 24 -pattern_type glob -i "{fac_path}/frames/scene_{i:03d}/frame*.png" '
- f"{fac_path}/videos/video_{i:03d}.mp4",
- shell=True,
- )
+ subprocess.run(
+ f'ffmpeg -y -r 24 -pattern_type glob -i "{fac}_*/Image*.png" video.mp4',
+ shell=True,
+ )
def snake_case(s):
@@ -693,6 +667,20 @@ def make_args():
action="store_true",
help="Import assets but do not run them. Used for testing.",
)
+ parser.add_argument(
+ "--configs",
+ type=str,
+ nargs="+",
+ default=[],
+ help="List of gin config files to apply",
+ )
+ parser.add_argument(
+ "--overrides",
+ type=str,
+ nargs="+",
+ default=[],
+ help="List of gin overrides to apply",
+ )
return init.parse_args_blender(parser)
diff --git a/infinigen_examples/generate_indoors.py b/infinigen_examples/generate_indoors.py
index 337133fa7..129d9a8c3 100644
--- a/infinigen_examples/generate_indoors.py
+++ b/infinigen_examples/generate_indoors.py
@@ -481,6 +481,8 @@ def invisible_room_ceilings():
break
break
+ p.save_results(output_folder / "pipeline_coarse.csv")
+
return {
"height_offset": height,
"whole_bbox": house_bbox,
diff --git a/infinigen_examples/generate_material_balls.py b/infinigen_examples/generate_material_balls.py
index ce64e9e5a..384989a38 100644
--- a/infinigen_examples/generate_material_balls.py
+++ b/infinigen_examples/generate_material_balls.py
@@ -82,18 +82,14 @@ def build_scene_surface(factory_name, idx):
raise Exception(f"{factory_name} not Found.")
if type(template) is type:
template = template(idx)
- if hasattr(template, "make_sphere"):
- asset = template.make_sphere()
- asset.scale = [0.3] * 3
- butil.apply_transform(asset)
- else:
- bpy.ops.mesh.primitive_ico_sphere_add(radius=scale, subdivisions=7)
- asset = bpy.context.active_object
- asset.rotation_euler = (
- uniform(np.pi / 6, np.pi / 3),
- uniform(-np.pi / 12, np.pi / 12),
- uniform(-np.pi / 12, np.pi / 12),
- )
+
+ bpy.ops.mesh.primitive_ico_sphere_add(radius=scale, subdivisions=7)
+ asset = bpy.context.active_object
+ asset.rotation_euler = (
+ uniform(np.pi / 6, np.pi / 3),
+ uniform(-np.pi / 12, np.pi / 12),
+ uniform(-np.pi / 12, np.pi / 12),
+ )
with FixedSeed(idx):
if "metal" in factory_name or "sofa_fabric" in factory_name:
diff --git a/pyproject.toml b/pyproject.toml
index 98935e94f..834eb4626 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -66,7 +66,8 @@ dev = [
"pytype",
"ruff",
"isort",
- "tabulate", # for integration test results
+ "tabulate",
+ "rapidfuzz",
"pre-commit"
]
diff --git a/tests/integration/integration_test_launch.sh b/tests/integration/integration_test_launch.sh
new file mode 100644
index 000000000..8b76e79e6
--- /dev/null
+++ b/tests/integration/integration_test_launch.sh
@@ -0,0 +1,67 @@
+# Environment Variables for Opting In/Out
+RUN_INDOOR=${RUN_INDOOR:-1}
+RUN_NATURE=${RUN_NATURE:-1}
+RUN_OBJECTS=${RUN_OBJECTS:-1}
+RUN_MATERIALS=${RUN_MATERIALS:-1}
+
+# Version Info
+INFINIGEN_VERSION=$(python -c "import infinigen; print(infinigen.__version__)")
+COMMIT_HASH=$(git rev-parse HEAD | cut -c 1-6)
+DATE=$(date '+%Y-%m-%d')
+JOBTAG="${DATE}_ifg-int"
+BRANCH=$(git rev-parse --abbrev-ref HEAD | sed 's/_/-/g')
+VERSION_STRING="${DATE}_${INFINIGEN_VERSION}_${BRANCH}_${COMMIT_HASH}_${USER}"
+OUTPUT_PATH=/n/fs/pvl-renders/integration_test/runs/
+
+mkdir -p $OUTPUT_PATH
+OUTPUT_PATH=$OUTPUT_PATH/$VERSION_STRING
+
+# Run Indoor Scene Generation
+if [ "$RUN_INDOOR" -eq 1 ]; then
+ for indoor_type in DiningRoom Bathroom Bedroom Kitchen LivingRoom; do
+ python -m infinigen.datagen.manage_jobs --output_folder $OUTPUT_PATH/${JOBTAG}_scene_indoor_$indoor_type \
+ --num_scenes 3 --cleanup big_files --configs singleroom --overwrite \
+ --pipeline_configs slurm_1h monocular indoor_background_configs.gin \
+ --pipeline_overrides get_cmd.driver_script=infinigen_examples.generate_indoors sample_scene_spec.seed_range=[0,100] slurm_submit_cmd.slurm_nodelist=$NODECONF \
+ --overrides compose_indoors.terrain_enabled=True restrict_solving.restrict_parent_rooms=\[\"$indoor_type\"\] compose_indoors.solve_small_enabled=False &
+ done
+fi
+
+# Run Nature Scene Generation
+if [ "$RUN_NATURE" -eq 1 ]; then
+ for nature_type in arctic canyon cave coast coral_reef desert forest kelp_forest mountain plain river snowy_mountain under_water; do
+ python -m infinigen.datagen.manage_jobs --output_folder $OUTPUT_PATH/${JOBTAG}_scene_nature_$nature_type \
+ --num_scenes 3 --cleanup big_files --overwrite \
+ --configs $nature_type.gin dev.gin \
+ --pipeline_configs slurm_1h monocular \
+ --pipeline_overrides sample_scene_spec.seed_range=[0,100] &
+ done
+fi
+
+# Objects
+if [ "$RUN_OBJECTS" -eq 1 ]; then
+
+ python -m infinigen_examples.generate_individual_assets \
+ -f tests/assets/list_nature_meshes.txt --output_folder $OUTPUT_PATH/${JOBTAG}_asset_nature_meshes \
+ --slurm --n_workers 100 -n 3 --gpu &
+
+ python -m infinigen_examples.generate_individual_assets \
+ -f tests/assets/list_indoor_meshes.txt --output_folder $OUTPUT_PATH/${JOBTAG}_asset_indoor_meshes \
+ --slurm --n_workers 100 -n 3 --gpu &
+fi
+
+# Materials
+if [ "$RUN_MATERIALS" -eq 1 ]; then
+
+ python -m infinigen_examples.generate_individual_assets \
+ -f tests/assets/list_indoor_materials.txt --output_folder $OUTPUT_PATH/${JOBTAG}_asset_indoor_materials \
+ --slurm --n_workers 100 -n 3 --gpu &
+
+
+ python -m infinigen_examples.generate_individual_assets \
+ -f tests/assets/list_nature_materials.txt --output_folder $OUTPUT_PATH/${JOBTAG}_asset_nature_materials \
+ --slurm --n_workers 100 -n 3 --gpu &
+fi
+
+# Wait for all background processes to finish
+wait
\ No newline at end of file
diff --git a/tests/integration/integration_test_parse_logs.py b/tests/integration/integration_test_parse_logs.py
new file mode 100644
index 000000000..a3084216d
--- /dev/null
+++ b/tests/integration/integration_test_parse_logs.py
@@ -0,0 +1,377 @@
+# Copyright (C) 2024, Princeton University.
+# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory
+# of this source tree.
+
+import argparse
+import os
+import re
+from datetime import timedelta
+from pathlib import Path
+
+import jinja2
+import pandas as pd
+
+
+def sizeof_fmt(num, suffix="B"):
+ for unit in ("", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"):
+ if abs(num) < 1024.0:
+ return f"{num:3.1f} {unit}{suffix}"
+ num /= 1024.0
+ return f"{num:.1f}Yi{suffix}"
+
+
+def td_to_str(td):
+ """
+ convert a timedelta object td to a string in HH:MM:SS format.
+ """
+ if pd.isnull(td):
+ return td
+ hours, remainder = divmod(td.total_seconds(), 3600)
+ minutes, seconds = divmod(remainder, 60)
+ return f"{int(hours):02}:{int(minutes):02}:{int(seconds):02}"
+
+
+def parse_scene_log(
+ scene_path: Path,
+):
+ ret_dict = {
+ "coarse_tris": "NAN",
+        "fine_tris": "NAN",
+ "obj_count": "NAN",
+ "gen_time": "NAN",
+ "gen_mem_gb": "NAN",
+ "render_time": "NAN",
+ "gt_time": "NAN",
+ }
+
+ step_times = {
+ "fineterrain": [],
+ "coarse": [],
+ "populate": [],
+ "rendershort": [],
+ "shortrender": [],
+ "blendergt": [],
+ }
+
+ log_folder = scene_path/"logs"
+ coarse_folder = scene_path/"coarse"
+ fine_folder = scene_path/"fine"
+
+ if not (
+ log_folder.exists()
+ and coarse_folder.exists()
+ and fine_folder.exists()
+ ):
+ return ret_dict
+
+ for filepath in log_folder.glob("*.err"):
+ step = ""
+ for stepName in step_times:
+ if filepath.stem.startswith(stepName):
+ step = stepName
+ break
+ else:
+ continue
+ errFile = open(filepath)
+ text = errFile.read()
+ if "[MAIN TOTAL] finished in" not in text:
+ continue
+ search = re.search(
+ r"\[MAIN TOTAL\] finished in ([0-9]+):([0-9]+):([0-9]+)", text
+ )
+ d = None
+ if search is None:
+ search = re.search(
+ r"\[MAIN TOTAL\] finished in ([0-9]) day.*, ([0-9]+):([0-9]+):([0-9]+)",
+ text,
+ )
+ d, h, m, s = search.group(1, 2, 3, 4)
+ else:
+ h, m, s = search.group(1, 2, 3)
+ if d is None:
+ step_timedelta = timedelta(hours=int(h), minutes=int(m), seconds=int(s))
+ else:
+ step_timedelta = timedelta(
+ days=int(d), hours=int(h), minutes=int(m), seconds=int(s)
+ )
+ step_times[step].append(step_timedelta)
+
+ coarse_stage_df = pd.read_csv(os.path.join(coarse_folder, "pipeline_coarse.csv"))
+
+ if len(step_times["coarse"]) >= 1:
+ coarse_time = step_times["coarse"][0]
+ else:
+ coarse_time = timedelta(seconds=0)
+ if len(step_times["populate"]) >= 1:
+ pop_time = step_times["populate"][0]
+ else:
+ pop_time = timedelta(seconds=0)
+
+ if len(step_times["fineterrain"]) >= 1:
+ fine_time = step_times["fineterrain"][0]
+ else:
+ fine_time = timedelta(seconds=0)
+ if len(step_times["rendershort"]) >= 1:
+ render_time = step_times["rendershort"][0]
+ elif len(step_times["shortrender"]) >= 1:
+ render_time = step_times["shortrender"][0]
+ else:
+ render_time = timedelta(seconds=0)
+ if len(step_times["blendergt"]) >= 1:
+ gt_time = step_times["blendergt"][0]
+ else:
+ gt_time = timedelta(seconds=0)
+
+ mem = coarse_stage_df["mem_at_finish"].iloc[-1]
+ obj_count = coarse_stage_df["obj_count"].iloc[-1]
+
+ ret_dict = {
+ "obj_count": obj_count,
+ "gen_time": coarse_time + pop_time + fine_time,
+ "gen_mem_gb": sizeof_fmt(mem),
+ "render_time": render_time,
+ "gt_time": gt_time,
+ }
+
+ fine_poly = parse_poly_file(fine_folder/"polycounts.txt")
+ ret_dict["gen_triangles"] = fine_poly.get("Triangles", "NAN")
+
+ return ret_dict
+
+def parse_poly_file(path):
+ res = {}
+
+ if not path.exists():
+ return res
+
+ for l in path.read_text().splitlines():
+ fields = l.split(":")
+ if len(fields) != 2:
+ continue
+ k, v = fields
+ res[k] = v
+
+ return res
+
+def parse_asset_log(asset_path):
+
+ poly = parse_poly_file(asset_path/"polycounts.txt")
+
+ return {
+ "triangles": poly.get("Tris", "NAN"),
+ "gen_mem": poly.get("Memory", "NAN"),
+ }
+
+def format_stats(d):
+ return ", ".join(f"{k}: {v}" for k, v in d.items())
+
+def parse_run_df(run_path: Path):
+ runs = {
+ "_".join((x.name.split("_")[2:])): x for x in run_path.iterdir() if x.is_dir()
+ }
+ for k, v in runs.items():
+ print(k, v)
+
+ records = []
+
+ def scene_folders(type):
+ scenes = []
+
+ for name, path in runs.items():
+ if not name.startswith(type):
+ continue
+ for scene in path.iterdir():
+ if not scene.is_dir():
+ continue
+ if scene.name == "logs":
+ continue
+ scenes.append(scene)
+
+ return sorted(scenes)
+
+ IMG_NAME = "Image_0_0_0048_0.png"
+ NORMAL_NAME = "SurfaceNormal_0_0_0048_0.png"
+
+ for scene in scene_folders("scene_nature"):
+ stats = parse_scene_log(scene)
+ scenetype = "_".join(scene.parent.name.split("_")[2:])
+ img = scene / "frames" / "Image" / "camera_0" / IMG_NAME
+ normal = scene / "frames" / "SurfaceNormal" / "camera_0" / NORMAL_NAME
+ records.append(
+ {
+ "name": scenetype + "/" + scene.name,
+ "category": "scene_nature",
+ "img_path": img,
+ "normal_path": normal,
+ "stats": format_stats(stats),
+ }
+ )
+
+ for scene in scene_folders("scene_indoor"):
+ stats = parse_scene_log(scene)
+ scenetype = "_".join(scene.parent.name.split("_")[2:])
+ img = scene / "frames" / "Image" / "camera_0" / IMG_NAME
+ normal = scene / "frames" / "SurfaceNormal" / "camera_0" / NORMAL_NAME
+ records.append(
+ {
+ "name": scenetype + "/" + scene.name,
+ "category": "scene_indoor",
+ "img_path": img,
+ "normal_path": normal,
+ "stats": format_stats(stats),
+ }
+ )
+
+ for scene in scene_folders("asset"):
+ category = "_".join(scene.parent.name.split("_")[2:])
+ record = {
+ "category": category,
+ "name": category + "/" + scene.name,
+ "img_path": scene / "Image.png",
+ "stats": format_stats(parse_asset_log(scene)),
+ }
+
+ records.append(record)
+
+ print(f"{run_path=} found {len(records)} records")
+
+ return pd.DataFrame.from_records(records)
+
+
+def find_run(base_path: str, run: str) -> Path:
+ base_path = Path(base_path)
+
+ run_path = base_path / run
+ if run_path.exists():
+ return run_path
+
+ options = [x for x in base_path.iterdir() if run in x.name]
+ if len(options) == 1:
+ return options[0]
+ elif len(options) > 1:
+ raise ValueError(f"Multiple runs found for {run}, {options}")
+ else:
+ raise FileNotFoundError(f"Could not find match for {run=} in {base_path=}")
+
+
+def fuzzy_merge(dfA, dfB, keyA, keyB, threshold=1):
+
+ from rapidfuzz import fuzz, process
+
+ matches_A = []
+ matches_B = []
+
+
+ def preproc(x):
+ x = x.split('/')[-1]
+ x = re.sub(r'(?
+
+
+ {{ title }}
+
+
+
+
+
{{ heading }}
+
+
+
+
+ Version {{version_names[0]}}
+
+
+ Version {{version_names[1]}}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for scene in scene_nature %}
+
+
+
+
{{scene['name_A']}}
+
+
{{scene['stats_A']}}
+
+
+
+
+
{{scene['name_B']}}
+
+
{{scene['stats_B']}}
+
+
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+ {% for scene in scene_indoor %}
+
+
+
+
{{scene['name_A']}}
+
+
+
{{scene['stats_A']}}
+
+
+
+
{{scene['name_B']}}
+
+
+
{{scene['stats_B']}}
+
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+ {% for object in asset_nature_meshes %}
+
+
+
+
{{object['name_A']}}
+
+
{{object['stats_A']}}
+
+
+
+
+
{{object['name_B']}}
+
+
{{object['stats_B']}}
+
+
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for object in asset_indoor_meshes %}
+
+
+
+
{{object['name_A']}}
+
+
{{object['stats_A']}}
+
+
+
+
+
{{object['name_B']}}
+
+
{{object['stats_B']}}
+
+
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+ {% for material in asset_nature_materials %}
+
+
+
+
{{material['name_A']}}
+
+
{{material['stats_A']}}
+
+
+
+
+
{{material['name_B']}}
+
+
{{material['stats_B']}}
+
+
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+ {% for material in asset_indoor_materials %}
+
+
+
+
{{material['name_A']}}
+
+
{{material['stats_A']}}
+
+
+
+
+
{{material['name_B']}}
+
+
{{material['stats_B']}}
+
+
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/solver/test_stable_against.py b/tests/solver/test_stable_against.py
index a02cd3e84..7bac0b34c 100644
--- a/tests/solver/test_stable_against.py
+++ b/tests/solver/test_stable_against.py
@@ -52,6 +52,50 @@ def make_scene(loc2):
return state_def.State(objs=objs)
+def make_scene_coplanar(loc2):
+ """Create a scene with a table and a cup, and return the state."""
+ butil.clear_scene()
+ objs = {}
+
+ table = butil.spawn_cube(scale=(5, 5, 1), name="table")
+ cup = butil.spawn_cube(scale=(1, 1, 1), name="cup", location=loc2)
+
+ for o in [table, cup]:
+ butil.apply_transform(o)
+ parse_scene.preprocess_obj(o)
+ tagging.tag_canonical_surfaces(o)
+
+ assert table.scale == Vector((1, 1, 1))
+ assert cup.location != Vector((0, 0, 0))
+
+ bpy.context.view_layer.update()
+
+ objs["table"] = state_def.ObjectState(table)
+ objs["cup"] = state_def.ObjectState(cup)
+ objs["cup"].relations.append(
+ state_def.RelationState(
+ cl.StableAgainst({t.Subpart.Bottom}, {t.Subpart.Top}),
+ target_name="table",
+ child_plane_idx=0,
+ parent_plane_idx=0,
+ )
+ )
+ back = {t.Subpart.Back, -t.Subpart.Top, -t.Subpart.Front}
+ back_coplanar_back = cl.CoPlanar(back, back, margin=0)
+
+ objs["cup"].relations.append(
+ state_def.RelationState(
+ back_coplanar_back,
+ target_name="table",
+ child_plane_idx=0,
+ parent_plane_idx=0,
+ )
+ )
+ butil.save_blend("test.blend")
+
+ return state_def.State(objs=objs)
+
+
def test_stable_against():
# too low, intersects ground
assert not validity.check_post_move_validity(make_scene((0, 0, 0.5)), "cup")
@@ -150,5 +194,41 @@ def test_horizontal_stability():
# butil.save_blend('test.blend')
+def test_coplanar():
+ # Test case 1: Cup is stable against but not coplanar (should be invalid)
+ assert not validity.check_post_move_validity(make_scene_coplanar((0, 0, 1)), "cup")
+
+ # Test case 2: Cup is stable against and coplanar with the table (should be valid)
+ assert validity.check_post_move_validity(make_scene_coplanar((-2, 0, 1)), "cup")
+
+ # Test case 3: Cup is coplanar but not stable against (should be invalid)
+ assert not validity.check_post_move_validity(
+ make_scene_coplanar((-5.2, 0, 1)), "cup"
+ )
+
+ # Test case 4: Cup is neither stable against nor coplanar (should be invalid)
+ assert not validity.check_post_move_validity(
+ make_scene_coplanar((2, 2, 1.1)), "cup"
+ )
+
+ # Test case 5: Cup is at the back edge, stable against and coplanar (should be valid)
+ assert validity.check_post_move_validity(make_scene_coplanar((-2, 2, 1)), "cup")
+
+ # Test case 6: Cup is slightly off the back edge, not stable against but coplanar (should be invalid)
+ assert not validity.check_post_move_validity(
+ make_scene_coplanar((-2.1, 2, 1)), "cup"
+ )
+
+ # Test case 7: Cup is far from the table (should be invalid)
+ assert not validity.check_post_move_validity(
+ make_scene_coplanar((10, 10, 10)), "cup"
+ )
+
+ # Test case 8: Cup is inside the table, not stable against but coplanar (should be invalid)
+ assert not validity.check_post_move_validity(make_scene_coplanar((-2, 0, 0)), "cup")
+
+ print("All test cases for coplanar constraint passed successfully.")
+
+
if __name__ == "__main__":
- test_horizontal_stability()
+ test_coplanar()