From dd5d560cf0495b51b5991d9a8788d6a5ee4e7e7f Mon Sep 17 00:00:00 2001 From: DavidYan-1 Date: Sun, 4 Feb 2024 15:37:34 -0500 Subject: [PATCH] Texture-baked exporting (princeton-vl/infinigen_internal/#103) * Full Scene Exporter * Regex Tweaks for Integration Testing * Refactor and optimize export * Move exporting README to docs/ folder and add more caveats * Path handling tweaks for exporter * Move export to infinigen.tools.export * Tweak docs * Add slurm scheduling to generate_individual_assets * Tweak generate_individual_assets * Small Refactor * Glass Export and other features Glass, Individual object, .obj vertex col export and fixes UV overrwrite * Add --export option to generate_individual_assets, use a slurm job array per factory not for the whole set * Add docs on generating & exporting individual assets * Export Optimizations and Bugfixes * Remove Unused Args * Typo / import fixes * Add --export option to generate_individual_assets, use a slurm job array per factory not for the whole set * Fix kwargs --------- Co-authored-by: Alexander Raistrick --- README.md | 3 +- docs/CHANGELOG.md | 4 + docs/ExportingToExternalFileFormats.md | 56 ++ docs/GeneratingIndividualAssets.md | 39 +- infinigen/__init__.py | 2 +- infinigen/assets/lighting/__init__.py | 3 +- .../configs/compute_platform/slurm_1h.gin | 2 + infinigen/datagen/export/README.md | 46 -- infinigen/datagen/export/__init__.py | 0 infinigen/datagen/export/export.py | 305 --------- infinigen/tools/export.py | 632 ++++++++++++++++++ .../generate_individual_assets.py | 142 +++- tests/integration/manual_integration_check.py | 8 +- 13 files changed, 836 insertions(+), 406 deletions(-) create mode 100644 docs/ExportingToExternalFileFormats.md delete mode 100644 infinigen/datagen/export/README.md delete mode 100644 infinigen/datagen/export/__init__.py delete mode 100644 infinigen/datagen/export/export.py create mode 100644 infinigen/tools/export.py diff --git a/README.md b/README.md index 1ecb6710d..9d9e88ce1 100644 --- a/README.md +++ b/README.md @@ -45,8 +45,9 @@ Next, see our ["Hello World" example](docs/HelloWorld.md) to generate an image & - ["Hello World": Generate your first Infinigen scene](docs/HelloWorld.md) - [Configuring Infinigen](docs/ConfiguringInfinigen.md) - [Downloading pre-generated data](docs/PreGeneratedData.md) -- [Extended ground-truth](docs/GroundTruthAnnotations.md) - [Generating individual assets](docs/GeneratingIndividualAssets.md) +- [Exporting to external fileformats (OBJ, OpenUSD, etc)](docs/ExportingToExternalFileFormats.md) +- [Extended ground-truth](docs/GroundTruthAnnotations.md) - [Implementing new materials & assets](docs/ImplementingAssets.md) - [Generating fluid simulations](docs/GeneratingFluidSimulations.md) diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md index 480f5c4b8..6b4044f8b 100644 --- a/docs/CHANGELOG.md +++ b/docs/CHANGELOG.md @@ -39,3 +39,7 @@ v1.2.6 v1.3.1 - Fix configuration bug causing massive render slowdown - Create noisier video trajectories optimized for training + +v1.3.2 +- Bugfix USD/OBJ exporter, add export options to generate_individual_assets + diff --git a/docs/ExportingToExternalFileFormats.md b/docs/ExportingToExternalFileFormats.md new file mode 100644 index 000000000..f1f5de6b2 --- /dev/null +++ b/docs/ExportingToExternalFileFormats.md @@ -0,0 +1,56 @@ + +# Asset Exporter + +This documentation details how to create an OBJ, FBX, STL, PLY or OpenUSD file from a `.blend` file, such as those produced by [Hello World](HelloWorld.md) or [Generating Individual 
Assets](./GeneratingIndividualAssets.md).
+
+Blender does provide a built-in exporter, but it won't work out of the box for Infinigen, since our files contain procedural materials and assets defined using shader programs. This tool's job is to "bake" all these procedural elements into more standard graphics formats (i.e., simple meshes with materials made of texture maps), before invoking the standard Blender exporter. This process can be slow, since it uses a rendering engine, and lossy, since the resulting textures have a finite resolution.
+
+To convert a folder of Blender files into USD files (our recommended format), use the command below:
+```bash
+python -m infinigen.tools.export --input_folder {PATH_TO_FOLDER_OF_BLENDFILES} --output_folder outputs/my_export -f usdc -r 1024
+```
+
+If you want a different output format, use the `--help` flag or one of the options below:
+- `-f obj` will export in .obj format.
+- `-f fbx` will export in .fbx format.
+- `-f stl` will export in .stl format.
+- `-f ply` will export in .ply format.
+- `-f usdc` will export in .usdc format.
+- `-v` enables per-vertex colors (only compatible with the .obj, .fbx and .ply formats).
+- `-r {INT}` controls the resolution of the baked texture maps. For instance, `-r 1024` will export 1024 x 1024 texture maps.
+- `--individual` will export each object in a scene in its own individual file.
+
+## :warning: Exporting full Infinigen scenes is only supported for USDC files.
+
+:bulb: Note: exporting OBJ/FBX files of **single objects** generally works fine; this discussion only refers to large-scale scenes.
+
+Infinigen makes use of *instancing* to represent densely scattered objects. That is, rather than storing millions of unique high-detail pebbles or leaves to scatter on the floor, we use a smaller set of unique objects which are stored in memory only once, but are repeated all over the scene with many different transforms.
+
+To our knowledge, no file formats except '.blend' and '.usdc' support saving 3D files that contain instanced geometry. For all file formats besides these two, instances will be *realized*: instead of storing just a few unique meshes, the meshes will be copied, pasted and transformed thousands of times (once for each unique scatter location). This creates a simple mesh, but the cost is so high that we do not recommend attempting it for full Infinigen scenes.
+
+If you require OBJ/FBX/PLY files for your research, you have a few options:
+- You can use individual objects, rather than full scenes. These generally don't contain instancing, so they can be exported to simple mesh formats.
+- You can use the advice in [Configuring Infinigen](./ConfiguringInfinigen.md) to create a scene that has very small extent or low detail, such that the final realized mesh will still be small enough to fit in memory.
+- You can use the advice in [Configuring Infinigen](./ConfiguringInfinigen.md) to create a scene which simply doesn't contain any instanced objects. Specifically, you should turn off trees and all scattered objects.
+  - The simplest way to do this is to turn off everything except terrain, by including the config `no_assets.gin`.
+
+*Caveat for the above:* Infinigen's implementation of trees uses instances to represent leaves and branches. Trees are also generally large and high-detail enough to cause issues if you realize them before exporting. Therefore, exporting whole trees as OBJs also generally isn't supported, unless you do so at very low resolution, or you turn off the tree's branches / leaves first.
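If you prefer to drive the exporter from Python rather than from the command line, the `export_scene` entrypoint added in `infinigen/tools/export.py` by this patch can be called directly. A minimal sketch, assuming Infinigen is installed into a Blender-enabled Python environment; the paths below are placeholders:

```python
from pathlib import Path

from infinigen.tools import export

# Bake textures for one .blend file and write the chosen format next to them.
export.export_scene(
    input_blend=Path("outputs/my_scene/scene.blend"),   # placeholder input
    output_folder=Path("outputs/my_export"),            # placeholder output
    format="usdc",             # any entry of export.FORMAT_CHOICES
    image_res=1024,            # resolution of the baked texture maps
    vertex_colors=False,
    individual_export=False,   # set True to write one file per object
)
```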
+
+## Other Known Issues and Limitations
+
+* Some material features used in Infinigen are not yet supported by this exporter. Specifically, this script only handles Albedo, Roughness and Metallic maps. Any other procedural parameters of the material will be ignored, so you should not expect complex materials (e.g. skin, translucent leaves, glowing lava) to be perfectly reproduced outside of Blender. Depending on the file format, there is limited support for materials with non-procedural, constant values of transmission, clearcoat, and sheen.
+
+* Exporting *animated* 3D files is generally untested and not officially supported. This includes exporting particles, articulated creatures, deforming plants, etc. These features are *in principle* supported by OpenUSD, but are untested by us and not officially supported by this export script.
+
+* Assets with transparent materials (water, glass-like materials, etc.) may have incorrect textures for all material parameters after export.
+
+* Large scenes and assets may take a long time to export and will crash Blender if you do not have a sufficiently large amount of memory. The export results may also be unusably large.
+
+* When exporting in .fbx format, the embedded roughness texture maps in the file may sometimes be too bright or too dark. The .png roughness map in the folder is accurate, however.
+
+* .fbx exports occasionally fail due to invalid UV values on complicated geometry. Adjusting the `island_margin` value in `bpy.ops.uv.smart_project()` sometimes remedies this.
+
+
+
+
+
diff --git a/docs/GeneratingIndividualAssets.md b/docs/GeneratingIndividualAssets.md
index 34ef4e366..f0b2c2ffb 100644
--- a/docs/GeneratingIndividualAssets.md
+++ b/docs/GeneratingIndividualAssets.md
@@ -1,20 +1,16 @@
 # Generating Individual Assets
-This tutorial will help you generate images or .blend files of specific assets of your choice.
-
-Limitations (to be addressed soon):
-- This tool only exports .blend files. [See here](../infinigen/datagen/tools/export/README.md) for a prototype tool to convert these to standard mesh file formats, but it itself has some limitations.
-- This tool cannot currently generate or export terrain meshes.
+This tutorial will help you generate images, .blend files, or baked OBJ/USD/etc. files for any asset of your choosing.
 ### Example Commands
-Shown are three examples of using our `generate_individual_assets.py` script to create images and .blend files.
+Shown are three examples of using our `generate_individual_assets.py` script to create images and .blend files. If you want to create asset files in another format (e.g. OBJ, FBX, USD), you should first generate .blend files and then use our [Exporting to External File Formats](./ExportingToExternalFileFormats.md) docs to bake them to the format of your choosing.
 ```bash
 mkdir outputs
-python -m infinigen_examples.generate_individual_assets -f CoralFactory -n 8 --save_blend
-python -m infinigen_examples.generate_individual_assets -f seashells -n 1 --save_blend
-python -m infinigen_examples.generate_individual_assets -f chunkyrock -n 1 --save_blend
+python -m infinigen_examples.generate_individual_assets --output_folder outputs/corals -f CoralFactory -n 8 --save_blend
+python -m infinigen_examples.generate_individual_assets --output_folder outputs/seashells -f seashells -n 1 --save_blend
+python -m infinigen_examples.generate_individual_assets --output_folder outputs/chunkyrock -f chunkyrock -n 1 --save_blend
 ```

@@ -23,15 +19,26 @@ python -m infinigen_examples.generate_individual_assets -f chunkyrock -n 1 --sav

-Running the above commands will save images and .blend files into your `outputs` folder.
+Running the above commands will save images and .blend files into your `outputs` folder. You can customize what object is generated by changing the `-f` argument to the name of a different AssetFactory defined in the codebase (see the file `tests/test_meshes_basic.txt` for a partial list). Please run `python -m infinigen_examples.generate_individual_assets --help` for a full list of command-line arguments.
-The most commonly used arguments are:
-- `-f` to specify the name(s) of assets or materials to generate. `-f NAME` can specify to generate three different types of objects:
-  - If `NAME` is the name of a class defined in `infinigen/assets`, then it will be treated as an AssetFactory and used to generate objects from scratch. For example, you can say `-f CactusFactory` or `-f CarnivoreFactory`, or use the name of any similar Factory class in the codebase.
-  - If `NAME` is the name of a file in `infinigen/assets/materials`, that material will be applied onto a sphere
-  - If `NAME` is the name of a file in `infinigen/assets/scatters`, that scatter generator will be applied nto a plane
-- `-n` adjusts the number of images / blend files to be generated.
+### Creating OBJ, USD, FBX and other file formats
+
+You can use the `--export` flag to export each generated asset to a format of your choosing. Please see [ExportingToExternalFileFormats](./ExportingToExternalFileFormats.md) for details and limitations regarding exporting.
+
+Examples:
+
+```bash
+
+# Save a coral as an OBJ with texture maps
+python -m infinigen_examples.generate_individual_assets --output_folder outputs/corals -f CoralFactory -n 1 --render none --export obj
+
+# Save a bush as OpenUSD
+python -m infinigen_examples.generate_individual_assets --output_folder outputs/bush -f BushFactory -n 1 --render none --export usdc
+
+# See the full list of supported formats
+python -m infinigen_examples.generate_individual_assets --help
+```
diff --git a/infinigen/__init__.py b/infinigen/__init__.py
index 599b7d93a..395e1b7b1 100644
--- a/infinigen/__init__.py
+++ b/infinigen/__init__.py
@@ -1,3 +1,3 @@
 import logging
-__version__ = "1.3.1"
+__version__ = "1.3.2"
diff --git a/infinigen/assets/lighting/__init__.py b/infinigen/assets/lighting/__init__.py
index ff5ab83c9..a4e8fefe5 100644
--- a/infinigen/assets/lighting/__init__.py
+++ b/infinigen/assets/lighting/__init__.py
@@ -1,3 +1,2 @@
 from . import sky_lighting
-from .caustics_lamp import CausticsLampFactory
-from .glowing_rocks import GlowingRocksFactory
\ No newline at end of file
+from .caustics_lamp import CausticsLampFactory
\ No newline at end of file
diff --git a/infinigen/datagen/configs/compute_platform/slurm_1h.gin b/infinigen/datagen/configs/compute_platform/slurm_1h.gin
index 2081ed89f..ab90e08b3 100644
--- a/infinigen/datagen/configs/compute_platform/slurm_1h.gin
+++ b/infinigen/datagen/configs/compute_platform/slurm_1h.gin
@@ -1,5 +1,7 @@
 include 'compute_platform/slurm.gin'
+slurm_submit_cmd.slurm_niceness = 0
+
 iterate_scene_tasks.view_block_size = 3
 queue_combined.hours = 1
diff --git a/infinigen/datagen/export/README.md b/infinigen/datagen/export/README.md
deleted file mode 100644
index ab6cffaa5..000000000
--- a/infinigen/datagen/export/README.md
+++ /dev/null
@@ -1,46 +0,0 @@
-
-# Asset Exporter
-
-Export individaully generated assets in .blend files to various general-purpose file formats.
-
-Create a folder of ```.blend``` files and another empty folder for the export results.
- -Then, run the following: -``` -python -m infinigen.datagen.export -b {PATH_TO_BLEND_FILE_FOLDER} -e {PATH_TO_OUTPUT_FOLDER} -o -r 1024 -``` - -Commandline options summary: -- ```-o``` will export in .obj format, -- ```-f``` will export in .fbx format -- ```-s``` will export in .stl format -- ```-p``` will export in .ply format. -- ```-v``` enables per-vertex colors (only compatible with .fbx and .ply formats). -- ```-r {INT}``` controls the resolution of the baked texture maps. For instance, ```-r 1024``` will export 1024 x 1024 texture maps. - -Only one file type can be specified for each export. - -## Known Issues and Limitations - -* Assets that use transparency or have fur will have incorrect textures when exporting. This is unavoidable due to texture maps being generated from baking. - -* When using the vertex color export option, no roughness will be exported, only diffuse color - -* Very big assets (e.g. full trees with leaves) may take a long time to export and will crash Blender if you do not have a sufficiently large amount of memory. The export results may also be unusably large. - -* When exporting in .fbx format, the embedded roughness texture maps in the file may sometimes be too bright or too dark. The .png roughness map in the folder is correct, however. - -* .ply bush exports will have missing leaves when uploaded to SketchFab, but are otherwise intact in other renderers such as Meshlab. - -* .fbx exports ocassionally fail due to invalid UV values on complicated geometry. Adjusting the 'island_margin' value in bpy.ops.uv.smart_project() sometimes remedies this - -* If using exported .obj files in PyTorch3D, make sure to use the TexturesAtlas because a mesh may have multiple associated texture maps - -* Loading .obj files with a PyTorch3D inside a online Google Colabs sessions often inexplicably fails - try hosting locally - -* The native PyTorch 3D renderer does not support roughness maps on .objs - - - - - diff --git a/infinigen/datagen/export/__init__.py b/infinigen/datagen/export/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/infinigen/datagen/export/export.py b/infinigen/datagen/export/export.py deleted file mode 100644 index 527b2f4d2..000000000 --- a/infinigen/datagen/export/export.py +++ /dev/null @@ -1,305 +0,0 @@ -# Copyright (c) Princeton University. -# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
-# Portions of bakeTexture heavily modified from https://blender.stackexchange.com/a/191841 - -# Authors: David Yan - - -import bpy -import os -import sys -import argparse -import shutil - -from infinigen.core.init import parse_args_blender - -def realizeInstances(obj): - for mod in obj.modifiers: - if (mod is None or mod.type != 'NODES'): continue - print(mod) - print(mod.node_group) - print("Realizing instances on " + obj.name) - geo_group = mod.node_group - outputNode = geo_group.nodes['Group Output'] - for link in geo_group.links: #search for link to the output node - if (link.to_node == outputNode): - print("Found Link!") - from_socket = link.from_socket - geo_group.links.remove(link) - realizeNode = geo_group.nodes.new(type = 'GeometryNodeRealizeInstances') - geo_group.links.new(realizeNode.inputs[0], from_socket) - geo_group.links.new(outputNode.inputs[0], realizeNode.outputs[0]) - print("Applying modifier") - obj.select_set(True) - bpy.context.view_layer.objects.active = obj - bpy.ops.object.modifier_apply(modifier= mod.name) - obj.select_set(True) - return - -def bakeVertexColors(obj): - obj.select_set(True) - bpy.context.view_layer.objects.active = obj - vertColor = bpy.context.object.data.color_attributes.new(name="VertColor",domain='CORNER',type='BYTE_COLOR') - bpy.context.object.data.attributes.active_color = vertColor - bpy.ops.object.bake(type='DIFFUSE', pass_filter={'COLOR'}, target ='VERTEX_COLORS') - obj.select_set(False) - -def bakeTexture(obj, dest, img_size): # modified from https://blender.stackexchange.com/a/191841 - obj.select_set(True) - bpy.context.view_layer.objects.active = obj - - imgDiffuse = bpy.data.images.new(obj.name + '_Diffuse',img_size,img_size) - imgRough = bpy.data.images.new(obj.name + '_Rough',img_size,img_size) - - #UV Unwrap - print("UV Unwrapping") - bpy.ops.object.mode_set(mode='EDIT') - bpy.ops.mesh.select_all(action='SELECT') - bpy.ops.uv.smart_project(island_margin= 0.001) - bpy.ops.object.mode_set(mode='OBJECT') - - diffuse_file_name = obj.name + '_Diffuse.png' - diffuse_file_path = os.path.join(dest, diffuse_file_name) - - metalDict = {} - noBSDF = False - - # Iterate on all objects and their materials and bake to an image texture - - # Diffuse pass - noMaterials = True - for slot in obj.material_slots: - mat = slot.material - if (mat is None): continue - noMaterials = False - print(mat.name) - slot.material = mat.copy() # we duplicate in the case of distinct meshes sharing materials - mat = slot.material - mat.use_nodes = True - nodes = mat.node_tree.nodes - - diffuse_node = nodes.new('ShaderNodeTexImage') - diffuse_node.name = 'Diffuse_node' - diffuse_node.image = imgDiffuse - nodes.active = diffuse_node - - if (nodes.get("Principled BSDF") is None): - noBSDF = True - else: - principled_bsdf_node = nodes["Principled BSDF"] - metalDict[mat.name] = principled_bsdf_node.inputs["Metallic"].default_value # store metallic value and set to 0 - principled_bsdf_node.inputs["Metallic"].default_value = 0 - - print(metalDict) - - if (noMaterials): - return - - print("Baking Diffuse...") - bpy.ops.object.bake(type='DIFFUSE',pass_filter={'COLOR'}, save_mode='EXTERNAL') - - # Roughness pass - for slot in obj.material_slots: - mat = slot.material - if (mat is None): continue - mat.use_nodes = True - nodes = mat.node_tree.nodes - rough_node = nodes.new('ShaderNodeTexImage') - rough_node.name = 'Rough_node' - rough_node.image = imgRough - nodes.active = rough_node - - rough_file_name = obj.name + '_Rough.png' - rough_file_path = os.path.join(dest, 
rough_file_name) - - print("Baking Roughness...") - bpy.ops.object.bake(type='ROUGHNESS', save_mode='EXTERNAL') - - print("Saving to " + diffuse_file_path) - print("Saving to " + rough_file_path) - - imgDiffuse.filepath_raw = diffuse_file_path - imgRough.filepath_raw = rough_file_path - imgDiffuse.save() - imgRough.save() - - for slot in obj.material_slots: - mat = slot.material - if (mat is None): continue - mat.use_nodes = True - nodes = mat.node_tree.nodes - print("Reapplying baked texs on " + mat.name) - - # delete all nodes except baked nodes and bsdf - for n in nodes: - excludedNodes = {'Principled BSDF','Material Output', "Rough_node", "Diffuse_node"} - if n.name not in excludedNodes: - nodes.remove(n) - - diffuse_node = nodes["Diffuse_node"] - rough_node = nodes["Rough_node"] - output = nodes["Material Output"] - - # stick baked texture in material - if (noBSDF): - principled_bsdf_node = nodes.new("ShaderNodeBsdfPrincipled") - else: - principled_bsdf_node = nodes["Principled BSDF"] - - links = mat.node_tree.links - - # create the new shader node links - links.new(output.inputs[0], principled_bsdf_node.outputs[0]) - links.new(principled_bsdf_node.inputs["Base Color"], diffuse_node.outputs[0]) - links.new(principled_bsdf_node.inputs["Roughness"], rough_node.outputs[0]) - - # bring back metallic values - if not noBSDF: - principled_bsdf_node.inputs["Metallic"].default_value = metalDict[mat.name] - - # strip spaces and dots from names - for slot in obj.material_slots: - mat = slot.material - if (mat is None): continue - mat.name = (mat.name).replace(' ','_') - mat.name = (mat.name).replace('.','_') - - obj.select_set(False) - - - -def main(args, source, dest): - for filename in os.listdir(source): - if not filename.endswith('.blend'): - continue - - # setting up directory and files - filePath = os.path.join(source, filename) - - bpy.ops.wm.open_mainfile(filepath = filePath) - - projName = bpy.path.basename(bpy.context.blend_data.filepath) #gets basename e.g. thisfile.blend - - baseName = os.path.splitext(projName)[0] #gets the filename without .blend extension e.g. thisfile - - folderPath = os.path.join(dest, baseName) # folder path with name of blend file - - if not os.path.exists(folderPath): - os.mkdir(folderPath) - - if args.obj: - exportName = baseName + ".obj" #changes extension - if args.fbx: - exportName = baseName + ".fbx" - if args.stl: - exportName = baseName + ".stl" - if args.ply: - exportName = baseName + ".ply" - - exportPath = os.path.join(folderPath, exportName) # path - - print("Exporting to " + exportPath) - - # some objects may be in a collection hidden from render - # but not actually hidden themselves. 
this hides those objects - for collection in bpy.data.collections: - if (collection.hide_render): - for obj in collection.objects: - obj.hide_render = True - - # remove grid - if (bpy.data.objects.get("Grid") is not None): - bpy.data.objects.remove(bpy.data.objects["Grid"], do_unlink=True) - - bpy.context.scene.render.engine = 'CYCLES' - bpy.context.scene.cycles.device = "GPU" - bpy.context.scene.cycles.samples = 1 # choose render sample - - # iterate through all objects and bake them - for obj in bpy.data.objects: - print("---------------------------") - print(obj.name) - - obj.name = (obj.name).replace(' ','_') - obj.name = (obj.name).replace('.','_') - - if obj.type != 'MESH': - print("Not mesh, skipping ...") - continue - - if obj.hide_render: - print("Mesh hidden from render, skipping ...") - continue - - if (len(obj.data.vertices) == 0): - print("Mesh has no vertices, skipping ...") - continue - - realizeInstances(obj) - if args.stl: - continue - if args.vertex_colors: - bakeVertexColors(obj) - continue - bpy.ops.object.select_all(action='DESELECT') - bakeTexture(obj,folderPath, args.resolution) - - # remove all the hidden objects - for obj in bpy.data.objects: - if obj.hide_render: - bpy.data.objects.remove(obj, do_unlink=True) - - if args.obj: - bpy.ops.export_scene.obj(filepath = exportPath, path_mode='COPY', use_materials =True) - - if args.fbx: - if args.vertex_colors: - bpy.ops.export_scene.fbx(filepath = exportPath, colors_type='SRGB') - else: - bpy.ops.export_scene.fbx(filepath = exportPath, path_mode='COPY', embed_textures = True) - - if args.stl: - bpy.ops.export_mesh.stl(filepath = exportPath) - - if args.ply: - bpy.ops.export_mesh.ply(filepath = exportPath) - - shutil.make_archive(folderPath, 'zip', folderPath) - shutil.rmtree(folderPath) - - bpy.ops.wm.quit_blender() - -def dir_path(string): - if os.path.isdir(string): - return string - else: - raise NotADirectoryError(string) - -def make_args(): - parser = argparse.ArgumentParser() - group = parser.add_mutually_exclusive_group(required=True) - - parser.add_argument('-b', '--blend_folder', type=dir_path) - parser.add_argument('-e', '--export_folder', type=dir_path) - - group.add_argument('-f', '--fbx', action = 'store_true') # fbx export has some minor issues with roughness map accuracy - group.add_argument('-o', '--obj', action = 'store_true') - group.add_argument('-s', '--stl', action = 'store_true') - group.add_argument('-p', '--ply', action = 'store_true') - - parser.add_argument('-v', '--vertex_colors', action = 'store_true') - parser.add_argument('-r', '--resolution', default= 1024, type=int) - - args = parse_args_blender(parser) - - if (args.vertex_colors and (args.obj or args.stl)): - raise ValueError("File format does not support vertex colors.") - - if (args.ply and not args.vertex_colors): - raise ValueError(".ply export must use vertex colors.") - - return args - -if __name__ == '__main__': - args = make_args() - main(args, args.blend_folder, args.export_folder) diff --git a/infinigen/tools/export.py b/infinigen/tools/export.py new file mode 100644 index 000000000..346cbe30d --- /dev/null +++ b/infinigen/tools/export.py @@ -0,0 +1,632 @@ +# Copyright (c) Princeton University. +# This source code is licensed under the BSD 3-Clause license found in the LICENSE file in the root directory of this source tree. 
+# Authors: David Yan + + +import bpy +import os +import sys +import argparse +import shutil +import subprocess +import logging + +from pathlib import Path + +FORMAT_CHOICES = ["fbx", "obj", "usdc", "usda", "stl", "ply"] +BAKE_TYPES = {'DIFFUSE': 'Base Color', 'ROUGHNESS': 'Roughness'} # 'EMIT':'Emission' # "GLOSSY": 'Specular', 'TRANSMISSION':'Transmission' don't export +SPECIAL_BAKE = {'METAL': 'Metallic'} + +def apply_all_modifiers(obj): + for mod in obj.modifiers: + if (mod is None): continue + try: + obj.select_set(True) + bpy.context.view_layer.objects.active = obj + bpy.ops.object.modifier_apply(modifier=mod.name) + logging.info(f"Applied modifier {mod.name} on {obj}") + obj.select_set(False) + except RuntimeError: + logging.info(f"Can't apply {mod.name} on {obj}") + obj.select_set(False) + return + +def realizeInstances(obj): + for mod in obj.modifiers: + if (mod is None or mod.type != 'NODES'): continue + geo_group = mod.node_group + outputNode = geo_group.nodes['Group Output'] + + logging.info(f"Realizing instances on {mod.name}") + link = outputNode.inputs[0].links[0] + from_socket = link.from_socket + geo_group.links.remove(link) + realizeNode = geo_group.nodes.new(type = 'GeometryNodeRealizeInstances') + geo_group.links.new(realizeNode.inputs[0], from_socket) + geo_group.links.new(outputNode.inputs[0], realizeNode.outputs[0]) + +def remove_shade_smooth(obj): + for mod in obj.modifiers: + if (mod is None or mod.type != 'NODES'): continue + geo_group = mod.node_group + outputNode = geo_group.nodes['Group Output'] + if geo_group.nodes.get('Set Shade Smooth'): + logging.info("Removing shade smooth on " + obj.name) + smooth_node = geo_group.nodes['Set Shade Smooth'] + else: + continue + + link = smooth_node.inputs[0].links[0] + from_socket = link.from_socket + geo_group.links.remove(link) + geo_group.links.new(outputNode.inputs[0], from_socket) + +def check_material_geonode(node_tree): + if node_tree.nodes.get("Set Material"): + logging.info("Found set material!") + return True + + for node in node_tree.nodes: + if node.type == 'GROUP' and check_material_geonode(node.node_tree): + return True + + return False + +def handle_geo_modifiers(obj, export_usd): + has_geo_nodes = False + for mod in obj.modifiers: + if (mod is None or mod.type != 'NODES'): continue + has_geo_nodes = True + + if has_geo_nodes and not obj.data.materials: + mat = bpy.data.materials.new(name=f"{mod.name} shader") + obj.data.materials.append(mat) + mat.use_nodes = True + mat.node_tree.nodes.remove(mat.node_tree.nodes["Principled BSDF"]) + + if not export_usd: + realizeInstances(obj) + +def clean_names(): + for obj in bpy.data.objects: + obj.name = (obj.name).replace(' ','_') + obj.name = (obj.name).replace('.','_') + + if obj.type == 'MESH': + for uv_map in obj.data.uv_layers: + uv_map.name = uv_map.name.replace('.', '_') # if uv has '.'
in name the node will export wrong in USD + + for mat in bpy.data.materials: + if (mat is None): continue + mat.name = (mat.name).replace(' ','_') + mat.name = (mat.name).replace('.','_') + +def remove_obj_parents(): + for obj in bpy.data.objects: + world_loc = obj.matrix_world.to_translation() + obj.parent = None + obj.matrix_world.translation = world_loc + +def update_visibility(export_usd): + outliner_area = next(a for a in bpy.context.screen.areas if a.type == 'OUTLINER') + space = outliner_area.spaces[0] + space.show_restrict_column_viewport = True # Global visibility (Monitor icon) + revealed_collections = [] + hidden_objs = [] + for collection in bpy.data.collections: + if export_usd: + collection.hide_viewport = False #reenables viewports for all + # enables renders for all collections + if collection.hide_render: + collection.hide_render = False + revealed_collections.append(collection) + + elif collection.hide_render: # hides assets if we are realizing instances + for obj in collection.objects: + obj.hide_render = True + + # disables viewports and renders for all objs + if export_usd: + for obj in bpy.data.objects: + obj.hide_viewport = True + if not obj.hide_render: + hidden_objs.append(obj) + obj.hide_render = True + + return revealed_collections, hidden_objs + +def uv_unwrap(obj): + obj.select_set(True) + bpy.context.view_layer.objects.active = obj + + obj.data.uv_layers.new(name='ExportUV') + bpy.context.object.data.uv_layers['ExportUV'].active = True + + logging.info("UV Unwrapping") + bpy.ops.object.mode_set(mode='EDIT') + bpy.ops.mesh.select_all(action='SELECT') + try: + bpy.ops.uv.smart_project() + except RuntimeError: + logging.info("UV Unwrap failed, skipping mesh") + bpy.ops.object.mode_set(mode='OBJECT') + obj.select_set(False) + return False + bpy.ops.object.mode_set(mode='OBJECT') + obj.select_set(False) + return True + +def bakeVertexColors(obj): + bpy.ops.object.select_all(action='DESELECT') + obj.select_set(True) + bpy.context.view_layer.objects.active = obj + vertColor = bpy.context.object.data.color_attributes.new(name='VertColor',domain='CORNER',type='BYTE_COLOR') + bpy.context.object.data.attributes.active_color = vertColor + bpy.ops.object.bake(type='DIFFUSE', pass_filter={'COLOR'}, target ='VERTEX_COLORS') + obj.select_set(False) + +def apply_baked_tex(obj, paramDict={}): + bpy.context.view_layer.objects.active = obj + bpy.context.object.data.uv_layers['ExportUV'].active_render = True + for slot in obj.material_slots: + mat = slot.material + if (mat is None): + continue + mat.use_nodes = True + nodes = mat.node_tree.nodes + logging.info("Reapplying baked texs on " + mat.name) + + # delete all nodes except baked nodes and bsdf + excludedNodes = [type + '_node' for type in BAKE_TYPES] + excludedNodes.extend([type + '_node' for type in SPECIAL_BAKE]) + excludedNodes.extend(['Material Output','Principled BSDF']) + for n in nodes: + if n.name not in excludedNodes: + nodes.remove(n) # deletes an arbitrary principled BSDF in the case of a mix, which is handled below + + output = nodes['Material Output'] + + # stick baked texture in material + if nodes.get('Principled BSDF') is None: # no bsdf + logging.info("No BSDF, creating new one") + principled_bsdf_node = nodes.new('ShaderNodeBsdfPrincipled') + elif len(output.inputs[0].links) != 0 and output.inputs[0].links[0].from_node.bl_idname == 'ShaderNodeBsdfPrincipled': # trivial bsdf graph + logging.info("Trivial shader graph, using old BSDF") + principled_bsdf_node = nodes['Principled BSDF'] + else: + 
logging.info("Non-trivial shader graph, creating new BSDF") + nodes.remove(nodes['Principled BSDF']) # shader graph was a mix of bsdfs + principled_bsdf_node = nodes.new('ShaderNodeBsdfPrincipled') + + links = mat.node_tree.links + + # create the new shader node links + links.new(output.inputs[0], principled_bsdf_node.outputs[0]) + for type in BAKE_TYPES: + if not nodes.get(type + '_node'): continue + tex_node = nodes[type + '_node'] + links.new(principled_bsdf_node.inputs[BAKE_TYPES[type]], tex_node.outputs[0]) + for type in SPECIAL_BAKE: + if not nodes.get(type + '_node'): continue + tex_node = nodes[type + '_node'] + links.new(principled_bsdf_node.inputs[BAKE_TYPES[type]], tex_node.outputs[0]) + + # bring back cleared param values + if mat.name in paramDict: + principled_bsdf_node.inputs['Metallic'].default_value = paramDict[mat.name]['Metallic'] + principled_bsdf_node.inputs['Sheen'].default_value = paramDict[mat.name]['Sheen'] + principled_bsdf_node.inputs['Clearcoat'].default_value = paramDict[mat.name]['Clearcoat'] + +def create_glass_shader(node_tree): + nodes = node_tree.nodes + color = nodes['Glass BSDF'].inputs[0].default_value + roughness = nodes['Glass BSDF'].inputs[1].default_value + ior = nodes['Glass BSDF'].inputs[2].default_value + if nodes.get('Principled BSDF'): + nodes.remove(nodes['Principled BSDF']) + + principled_bsdf_node = nodes.new('ShaderNodeBsdfPrincipled') + principled_bsdf_node.inputs['Base Color'].default_value = color + principled_bsdf_node.inputs['Roughness'].default_value = roughness + principled_bsdf_node.inputs['IOR'].default_value = ior + principled_bsdf_node.inputs['Transmission'].default_value = 1 + node_tree.links.new(principled_bsdf_node.outputs[0], nodes['Material Output'].inputs[0]) + +def process_glass_materials(obj): + for slot in obj.material_slots: + mat = slot.material + if (mat is None or not mat.use_nodes): continue + nodes = mat.node_tree.nodes + outputNode = nodes['Material Output'] + if nodes.get('Glass BSDF'): + if outputNode.inputs[0].links[0].from_node.bl_idname == 'ShaderNodeBsdfGlass': + create_glass_shader(mat.node_tree) + else: + logging.info(f"Non-trivial glass material on {obj.name}, material export will be inaccurate") + +def bake_pass( + obj, + dest: Path, + img_size, + bake_type, +): + + img = bpy.data.images.new(f'{obj.name}_{bake_type}',img_size,img_size) + clean_name = (obj.name).replace(' ','_').replace('.','_') + file_path = dest/f'{clean_name}_{bake_type}.png' + dest = dest/'textures' + + bake_obj = False + bake_exclude_mats = {} + + # materials are stored as stack so when removing traverse the reversed list + for index, slot in reversed(list(enumerate(obj.material_slots))): + mat = slot.material + if mat is None: + bpy.context.object.active_material_index = index + bpy.ops.object.material_slot_remove() + continue + + logging.info(mat.name) + mat.use_nodes = True + nodes = mat.node_tree.nodes + + output = nodes["Material Output"] + + img_node = nodes.new('ShaderNodeTexImage') + img_node.name = f'{bake_type}_node' + img_node.image = img + nodes.active = img_node + + if len(output.inputs[0].links) != 0: + surface_node = output.inputs[0].links[0].from_node + if surface_node.bl_idname == 'ShaderNodeBsdfPrincipled' and len(surface_node.inputs[BAKE_TYPES[bake_type]].links) == 0: # trivial bsdf graph + logging.info(f"{mat.name} has no procedural input for {bake_type}, not using baked textures") + bake_exclude_mats[mat] = img_node + continue + + bake_obj = True + + if (bake_type == 'METAL'): + internal_bake_type = 'EMIT' + 
else: + internal_bake_type = bake_type + + if bake_obj: + logging.info(f'Baking {bake_type} pass') + bpy.ops.object.bake(type=internal_bake_type, pass_filter={'COLOR'}, save_mode='EXTERNAL') + img.filepath_raw = str(file_path) + img.save() + logging.info(f"Saving to {file_path}") + else: + logging.info(f"No necessary materials to bake on {obj.name}, skipping bake") + + for mat, img_node in bake_exclude_mats.items(): + mat.node_tree.nodes.remove(img_node) + +def bake_metal(obj, dest, img_size): # metal baking is not really set up for node graphs w/ 2 mixed BSDFs. + metal_map_mats = [] + for slot in obj.material_slots: + mat = slot.material + if (mat is None or not mat.use_nodes): continue + nodes = mat.node_tree.nodes + if nodes.get('Principled BSDF') and nodes.get('Material Output'): + principled_bsdf_node = nodes['Principled BSDF'] + outputNode = nodes['Material Output'] + else: continue + + links = mat.node_tree.links + + if len(principled_bsdf_node.inputs['Metallic'].links) != 0: + link = principled_bsdf_node.inputs['Metallic'].links[0] + from_socket = link.from_socket + links.remove(link) + links.new(outputNode.inputs[0], from_socket) + metal_map_mats.append(mat) + + if len(metal_map_mats) != 0: + bake_pass(obj, dest, img_size, 'METAL') + + for mat in metal_map_mats: + links.remove(outputNode.inputs[0].links[0]) + links.new(outputNode.inputs[0], principled_bsdf_node.outputs[0]) + +def remove_params(mat, node_tree): + paramDict = {} + nodes = node_tree.nodes + if nodes.get('Material Output'): + output = nodes['Material Output'] + elif nodes.get('Group Output'): + output = nodes['Group Output'] + else: + raise ValueError("Could not find material output node") + if nodes.get('Principled BSDF') and output.inputs[0].links[0].from_node.bl_idname == 'ShaderNodeBsdfPrincipled': + principled_bsdf_node = nodes['Principled BSDF'] + metal = principled_bsdf_node.inputs['Metallic'].default_value # store metallic value and set to 0 + sheen = principled_bsdf_node.inputs['Sheen'].default_value + clearcoat = principled_bsdf_node.inputs['Clearcoat'].default_value + paramDict[mat.name] = {'Metallic': metal, 'Sheen': sheen, 'Clearcoat': clearcoat} + principled_bsdf_node.inputs['Metallic'].default_value = 0 + principled_bsdf_node.inputs['Sheen'].default_value = 0 + principled_bsdf_node.inputs['Clearcoat'].default_value = 0 + return paramDict + +def process_interfering_params(obj): + for slot in obj.material_slots: + mat = slot.material + if (mat is None or not mat.use_nodes): continue + paramDict = remove_params(mat, mat.node_tree) + if len(paramDict) == 0: + for node in mat.node_tree.nodes: # only handles one level of sub-groups + if node.type == 'GROUP': + paramDict = remove_params(mat, node.node_tree) + + return paramDict + +def bake_object(obj, dest, img_size): + if not uv_unwrap(obj): + return + + bpy.ops.object.select_all(action='DESELECT') + obj.select_set(True) + + for slot in obj.material_slots: + mat = slot.material + if mat is not None: + slot.material = mat.copy() # we duplicate in the case of distinct meshes sharing materials + + process_glass_materials(obj) + + bake_metal(obj, dest, img_size) + + paramDict = process_interfering_params(obj) + + for bake_type in BAKE_TYPES: + bake_pass(obj, dest, img_size, bake_type) + + apply_baked_tex(obj, paramDict) + + obj.select_set(False) + +def skipBake(obj, export_usd): + if not obj.data.materials: + logging.info("No material on mesh, skipping...") + return True + + if obj.hide_render and not export_usd: + logging.info("Mesh hidden from render, skipping 
...") + return True + + if len(obj.data.vertices) == 0: + logging.info("Mesh has no vertices, skipping ...") + return True + + return False + +def bake_scene(folderPath: Path, image_res, vertex_colors, export_usd): + + for obj in bpy.data.objects: + logging.info("---------------------------") + logging.info(obj.name) + + if obj.type != 'MESH' or obj not in list(bpy.context.view_layer.objects): + logging.info("Not mesh, skipping ...") + continue + + handle_geo_modifiers(obj, export_usd) + + if skipBake(obj, export_usd): continue + + if format == "stl": + continue + + if vertex_colors: + bakeVertexColors(obj) + continue + + if export_usd: + obj.hide_render = False + obj.hide_viewport = False + + bake_object(obj, folderPath, image_res) + + if export_usd: + obj.hide_render = True + obj.hide_viewport = True + +def run_export(exportPath: Path, format: str, vertex_colors: bool, individual_export: bool): + + assert exportPath.parent.exists() + exportPath = str(exportPath) + + if format == "obj": + if vertex_colors: + bpy.ops.wm.obj_export(filepath = exportPath, export_colors=True, export_selected_objects=individual_export) + else: + bpy.ops.wm.obj_export(filepath = exportPath, path_mode='COPY', export_materials=True, export_pbr_extensions=True, export_selected_objects=individual_export) + + if format == "fbx": + if vertex_colors: + bpy.ops.export_scene.fbx(filepath = exportPath, colors_type='SRGB', use_selection = individual_export) + else: + bpy.ops.export_scene.fbx(filepath = exportPath, path_mode='COPY', embed_textures = True, use_selection=individual_export) + + if format == "stl": bpy.ops.export_mesh.stl(filepath = exportPath, use_selection = individual_export) + + if format == "ply": bpy.ops.export_mesh.ply(filepath = exportPath, export_selected_objects = individual_export) + + if format in ["usda", "usdc"]: bpy.ops.wm.usd_export(filepath = exportPath, export_textures=True, use_instancing=True, selected_objects_only=individual_export) + +def export_scene( + input_blend: Path, + output_folder: Path, + pipeline_folder=None, + task_uniqname=None, + **kwargs, +): + + bpy.ops.wm.open_mainfile(filepath=str(input_blend)) + + folder = output_folder/input_blend.name + folder.mkdir(exist_ok=True, parents=True) + result = export_curr_scene(folder, **kwargs) + + if pipeline_folder is not None and task_uniqname is not None : + (pipeline_folder / "logs" / f"FINISH_{task_uniqname}").touch() + + return result + +def export_curr_scene( + output_folder: Path, + format: str, + image_res: int, + vertex_colors=False, + individual_export=False, + pipeline_folder=None, + task_uniqname=None +) -> Path: + + export_usd = format in ["usda", "usdc"] + + export_folder = output_folder + export_folder.mkdir(exist_ok=True) + export_file = export_folder/output_folder.with_suffix(f'.{format}').name + + logging.info(f"Exporting to directory {export_folder=}") + + # remove grid + if bpy.data.objects.get("Grid"): + bpy.data.objects.remove(bpy.data.objects["Grid"], do_unlink=True) + + remove_obj_parents() + + scatter_cols = [] + if export_usd: + if bpy.data.collections.get("scatter"): + scatter_cols.append(bpy.data.collections["scatter"]) + if bpy.data.collections.get("scatters"): + scatter_cols.append(bpy.data.collections["scatters"]) + for col in scatter_cols: + for obj in col.all_objects: + remove_shade_smooth(obj) + + # remove 0 polygon meshes except for scatters + # if export_usd: + # for obj in bpy.data.objects: + # if obj.type == 'MESH' and len(obj.data.polygons) == 0: + # if scatter_cols is not None: + # if any(x in 
scatter_cols for x in obj.users_collection): + # continue + # logging.info(f"{obj.name} has no faces, removing...") + # bpy.data.objects.remove(obj, do_unlink=True) + + revealed_collections, hidden_objs = update_visibility(export_usd) + + bpy.context.scene.render.engine = 'CYCLES' + bpy.context.scene.cycles.device = 'GPU' + bpy.context.scene.cycles.samples = 1 # choose render sample + # Set the tile size + bpy.context.scene.cycles.tile_x = image_res + bpy.context.scene.cycles.tile_y = image_res + + # iterate through all objects and bake them + bake_scene( + folderPath=export_folder/'textures', + image_res=image_res, + vertex_colors=vertex_colors, + export_usd=export_usd + ) + + for collection in revealed_collections: + logging.info(f"Hiding collection {collection.name} from render") + collection.hide_render = True + + for obj in hidden_objs: + logging.info(f"Unhiding object {obj.name} from render") + obj.hide_render = False + + # remove all hidden assets if we realized + if not export_usd: + for obj in bpy.data.objects: + if obj.hide_render: + bpy.data.objects.remove(obj, do_unlink=True) + + clean_names() + + if individual_export: + bpy.ops.object.select_all(action='SELECT') + bpy.ops.object.location_clear() # send all objects to (0,0,0) + bpy.ops.object.select_all(action='DESELECT') + for obj in bpy.data.objects: + if obj.type != 'MESH' or obj.hide_render or len(obj.data.vertices) == 0 or obj not in list(bpy.context.view_layer.objects): + continue + + export_subfolder = export_folder/obj.name + export_subfolder.mkdir(exist_ok=True) + export_file = export_subfolder/f'{obj.name}.{format}' + + logging.info(f"Exporting file to {export_file=}") + obj.hide_viewport = False + obj.select_set(True) + run_export(export_file, format, vertex_colors, individual_export) + obj.select_set(False) + else: + logging.info(f"Exporting file to {export_file=}") + run_export(export_file, format, vertex_colors, individual_export) + + return export_folder + +def main(args): + + args.output_folder.mkdir(exist_ok=True) + logging.basicConfig(level=logging.DEBUG) + + targets = sorted(list(args.input_folder.iterdir())) + for blendfile in targets: + + if not blendfile.suffix == '.blend': + print(f'Skipping non-blend file {blendfile}') + continue + + folder = export_scene( + blendfile, + args.output_folder, + format=args.format, + image_res=args.resolution, + vertex_colors=args.vertex_colors, + individual_export=args.individual, + ) + + # wanted to use shutil here but kept making corrupted files + subprocess.call(['zip', '-r', str(folder.absolute().with_suffix('.zip')), str(folder.absolute())]) + + bpy.ops.wm.quit_blender() + +def make_args(): + parser = argparse.ArgumentParser() + + parser.add_argument('--input_folder', type=Path) + parser.add_argument('--output_folder', type=Path) + + parser.add_argument('-f', '--format', type=str, choices=FORMAT_CHOICES) + + parser.add_argument('-v', '--vertex_colors', action = 'store_true') + parser.add_argument('-r', '--resolution', default= 1024, type=int) + parser.add_argument('-i', '--individual', action = 'store_true') + + args = parser.parse_args() + + if args.format not in FORMAT_CHOICES: + raise ValueError("Unsupported or invalid file format.") + + if args.vertex_colors and args.format not in ["ply", "fbx", "obj"]: + raise ValueError("File format does not support vertex colors.") + + if (args.format == "ply" and not args.vertex_colors): + raise ValueError(".ply export must use vertex colors.") + + return args + +if __name__ == '__main__': + args = make_args() + main(args) 
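Because `run_export` above passes `use_instancing=True` to Blender's USD exporter, instanced scatters should (in principle) survive a `.usdc` export instead of being realized. A quick way to check an exported file, sketched under the assumption that the separate `usd-core` pip package is available (it is not part of this patch, and the path below is a placeholder):

```python
from pxr import Usd

# Placeholder path to a .usdc produced by the exporter above.
stage = Usd.Stage.Open("outputs/my_export/scene.blend/scene.usdc")

# Count prims that USD still treats as instances of a shared prototype.
instanced = [prim.GetPath() for prim in stage.Traverse() if prim.IsInstance()]
print(f"{len(instanced)} instanced prims, e.g. {instanced[:5]}")
```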
diff --git a/infinigen_examples/generate_individual_assets.py b/infinigen_examples/generate_individual_assets.py index 270a5d705..177e0aa34 100644 --- a/infinigen_examples/generate_individual_assets.py +++ b/infinigen_examples/generate_individual_assets.py @@ -18,6 +18,7 @@ from itertools import product from pathlib import Path import logging +from multiprocessing import Pool logging.basicConfig( format='[%(asctime)s.%(msecs)03d] [%(name)s] [%(levelname)s] | %(message)s', @@ -30,6 +31,8 @@ import numpy as np from PIL import Image +import submitit + from infinigen.assets.fluid.fluid import set_obj_on_fire from infinigen.assets.utils.decorate import assign_material, read_base_co from infinigen.assets.utils.tag import tag_object, tag_nodegroup, tag_system @@ -43,7 +46,27 @@ from infinigen.core.util.logging import Suppress from infinigen.core.util import blender as butil -from infinigen.tools.results import strip_alpha_background as strip_alpha_background +from infinigen.tools import export + +def load_txt_list(path, skip_sharp=False): + res = (Path(__file__).parent/path).read_text().splitlines() + res = [ + f.lstrip('#').lstrip(' ') + for f in res if + len(f) > 0 and not '#' in f + ] + print(res) + return res + +def load_txt_list(path, skip_sharp=False): + res = (Path(__file__).parent/path).read_text().splitlines() + res = [ + f.lstrip('#').lstrip(' ') + for f in res if + len(f) > 0 and not '#' in f + ] + print(res) + return res from . import generate_nature # to load most/all factory.AssetFactory subclasses @@ -67,7 +90,9 @@ def build_scene_asset(factory_name, idx): raise e factory.finalize_assets(asset) if args.fire: - set_obj_on_fire(asset,0,resolution = args.fire_res, simulation_duration = args.fire_duration, noise_scale=2, add_turbulence = True, adaptive_domain = False) + from infinigen.assets.fluid.fluid import set_obj_on_fire + set_obj_on_fire(asset, 0, resolution=args.fire_res, simulation_duration=args.fire_duration, + noise_scale=2, add_turbulence=True, adaptive_domain=False) bpy.context.scene.frame_set(args.fire_duration) bpy.context.scene.frame_end = args.fire_duration bpy.data.worlds['World'].node_tree.nodes["Background.001"].inputs[1].default_value = 0.04 @@ -134,7 +159,24 @@ def build_scene_surface(factory_name, idx): return asset -def build_scene(path, idx, factory_name, args): +def build_and_save_asset(payload: dict): + + # unpack payload - args are packed into payload for compatibility with slurm/multiprocessing + factory_name = payload['fac'] + args = payload['args'] + idx = payload['idx'] + + if args.seed > 0: + idx = args.seed + + if args.gpu: + enable_gpu() + + path = args.output_folder / factory_name + if path and args.skip_existing: + return + path.mkdir(exist_ok=True) + scene = bpy.context.scene scene.render.engine = 'CYCLES' scene.render.resolution_x, scene.render.resolution_y = map(int, args.resolution.split('x')) @@ -195,6 +237,28 @@ def build_scene(path, idx, factory_name, args): imgpath = path / f"frames/scene_{idx:03d}/frame_###.png" scene.render.filepath = str(imgpath) bpy.ops.render.render(animation=True) + elif args.render == 'none': + pass + else: + raise ValueError(f'Unrecognized {args.render=}') + + if args.export is not None: + export_path = path/'export'/f'export_{idx:03d}' + export_path.mkdir(exist_ok=True, parents=True) + export.export_curr_scene( + export_path, + format=args.export, + image_res=args.export_texture_res + ) + + if args.export is not None: + export_path = path/'export'/f'export_{idx:03d}' + export_path.mkdir(exist_ok=True, parents=True) + 
export.export_curr_scene( + export_path, + format=args.export, + image_res=args.export_texture_res + ) def parent(obj): @@ -262,12 +326,31 @@ def setup_camera(args): cam_info_ng.nodes['Object Info'].inputs['Object'].default_value = camera return camera, camera.parent - - - def subclasses(cls): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in subclasses(c)]) +def mapfunc(f, its, args): + if args.n_workers == 1: + return [f(i) for i in its] + elif not args.slurm: + with Pool(args.n_workers) as p: + return list(p.imap(f, its)) + else: + executor = submitit.AutoExecutor( + folder=args.output_folder/'logs' + ) + executor.update_parameters( + name=args.output_folder.name, + timeout_min=60, + cpus_per_task=2, + mem_gb=8, + slurm_partition=os.environ['INFINIGEN_SLURMPARTITION'], + slurm_array_parallelism=args.n_workers + ) + jobs = executor.map_array(f, its) + for j in jobs: + print(f'Job finished {j.wait()}') + def main(args): bpy.context.window.workspace = bpy.data.workspaces['Geometry Nodes'] @@ -285,13 +368,6 @@ def main(args): ) logging.getLogger("infinigen").setLevel(args.loglevel) - name = '_'.join(args.factories) - path = Path(os.getcwd()) / 'outputs' / name - path.mkdir(exist_ok=True) - - if args.gpu: - enable_gpu() - factories = list(args.factories) if 'ALL_ASSETS' in factories: factories += [f.__name__ for f in subclasses(factory.AssetFactory)] @@ -303,26 +379,24 @@ def main(args): factories += [f.stem for f in Path('infinigen/assets/materials').iterdir()] factories.remove('ALL_MATERIALS') + args.output_folder.mkdir(exist_ok=True) + + if not args.postprocessing_only: + for fac in factories: + targets = [ + {'args': args, 'fac': fac, 'idx': idx} + for idx in range(args.n_images) + ] + mapfunc(build_and_save_asset, targets, args) + for fac in factories: - fac_path = path / fac - if fac_path.exists() and args.skip_existing: - continue - fac_path.mkdir(exist_ok=True) - n_images = args.n_images - if not args.postprocessing_only: - for idx in range(n_images): - if args.seed >= 0: idx = args.seed - build_scene(fac_path, idx, fac, args) - try: - pass - except Exception as e: - print(e) - continue + fac_path = args.output_folder/fac + assert fac_path.exists() if args.render == 'image': - make_grid(args, fac_path, n_images) + make_grid(args, fac_path, args.n_images) if args.render == 'video': (fac_path / 'videos').mkdir(exist_ok=True) - for i in range(n_images): + for i in range(args.n_images): subprocess.run( f'ffmpeg -y -r 24 -pattern_type glob -i "{fac_path}/frames/scene_{i:03d}/frame*.png" ' f'{fac_path}/videos/video_{i:03d}.mp4', shell=True) @@ -332,9 +406,9 @@ def snake_case(s): return '_'.join( re.sub('([A-Z][a-z]+)', r' \1', re.sub('([A-Z]+)', r' \1', s.replace('-', ' '))).split()).lower() - def make_args(): parser = argparse.ArgumentParser() + parser.add_argument('--output_folder', type=Path) parser.add_argument('-f', '--factories', default=[], nargs='+', help="List factories/surface scatters/surface materials you want to render") parser.add_argument('-n', '--n_images', default=4, type=int, help="Number of scenes to render") @@ -354,7 +428,7 @@ def make_args(): parser.add_argument('-a', '--cam_angle', default=(-30, 0, 0), type=float, nargs='+', help="Camera rotation in XYZ") parser.add_argument('-c', '--cam_center', default=1, type=int, help="Camera rotation in XYZ") - parser.add_argument('-r', '--render', default='image', type=str, + parser.add_argument('-r', '--render', default='image', type=str, choices=['image', 'video', 'none'], help="Whether to 
render the scene in images or video") parser.add_argument('-b', '--best_ratio', default=9 / 16, type=float, help="Best aspect ratio for compiling the images into asset grid") @@ -371,6 +445,12 @@ def make_args(): parser.add_argument('-D', '--seed', type=int, default=-1, help="Run a specific seed.") parser.add_argument('-d', '--debug', action="store_const", dest="loglevel", const=logging.DEBUG, default=logging.INFO) + parser.add_argument('--n_workers', type=int, default=1) + parser.add_argument('--slurm', action='store_true') + + parser.add_argument('--export', type=str, default=None, choices=export.FORMAT_CHOICES) + parser.add_argument('--export_texture_res', type=int, default=1024) + return init.parse_args_blender(parser) if __name__ == '__main__': diff --git a/tests/integration/manual_integration_check.py b/tests/integration/manual_integration_check.py index baa47d00f..2d36953fe 100644 --- a/tests/integration/manual_integration_check.py +++ b/tests/integration/manual_integration_check.py @@ -122,7 +122,7 @@ def parse_scene_log(scene_path, step_times, asset_time_data, poly_data, asset_me all_data[seed]["[" + step + "] Step Time"] = step_timedelta # parse times < 1 day - for name, h, m, s in re.findall(r'INFO:times:\[(.*?)\] finished in ([0-9]+):([0-9]+):([0-9]+)', text): + for name, h, m, s in re.findall(r'\[INFO\] \| \[(.*?)\] finished in ([0-9]+):([0-9]+):([0-9]+)', text): timedelta_obj = timedelta(hours=int(h), minutes=int(m), seconds=int(s)) if (name == "MAIN TOTAL"): continue else: @@ -140,7 +140,7 @@ def parse_scene_log(scene_path, step_times, asset_time_data, poly_data, asset_me all_data[seed]["[time] " + stage_key] = timedelta_obj # parse times > 1 day - for name, d, h, m, s in re.findall(r'INFO:times:\[(.*?)\] finished in ([0-9]) day.*, ([0-9]+):([0-9]+):([0-9]+)', text): + for name, d, h, m, s in re.findall(r'\[INFO\] \| \[(.*?)\] finished in ([0-9]) day.*, ([0-9]+):([0-9]+):([0-9]+)', text): timedelta_obj = timedelta(days=int(d), hours=int(h),minutes=int(m),seconds=int(s)) if (name == "MAIN TOTAL"): continue else: @@ -195,7 +195,7 @@ def parse_scene_log(scene_path, step_times, asset_time_data, poly_data, asset_me all_data[seed]["[Objects Generated] [Coarse] " + row["name"]] = row["obj_delta"] all_data[seed]["[Instances Generated] [Coarse] " + row["name"]] = row["instance_delta"] - fine_stage_df = pd.read_csv(os.path.join(coarse_folder, "pipeline_fine.csv")) # this is supposed to be coarse folder + fine_stage_df = pd.read_csv(os.path.join(fine_folder, "pipeline_fine.csv")) # this is supposed to be coarse folder fine_stage_df["mem_delta"] = fine_stage_df[fine_stage_df['ran']]['mem_at_finish'].diff() fine_stage_df["obj_delta"] = fine_stage_df[fine_stage_df['ran']]['obj_count'].diff() fine_stage_df["instance_delta"] = fine_stage_df[fine_stage_df['ran']]['instance_count'].diff() @@ -501,7 +501,7 @@ def main(dir, time): test_logs(dir) except Exception as e: print(e) - + if time is None: print("\nNo slurm time arg provided, skipping scene memory stats") else:
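For reference, the updated patterns above expect the bracketed `[%(levelname)s] | %(message)s`-style logger output configured elsewhere in the codebase, rather than the old `INFO:times:` prefix. A small self-contained check against a made-up log line (the exact fields of real logs may differ):

```python
import re
from datetime import timedelta

# Hypothetical log line in the new bracketed format; real logger names will differ.
line = "[2024-02-04 15:37:34.123] [infinigen.core] [INFO] | [coarse] finished in 0:12:34"

match = re.search(r'\[INFO\] \| \[(.*?)\] finished in ([0-9]+):([0-9]+):([0-9]+)', line)
assert match is not None
name, h, m, s = match.groups()
print(name, timedelta(hours=int(h), minutes=int(m), seconds=int(s)))  # -> coarse 0:12:34
```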