Skip to content

Commit

Permalink
Bugfix tag segmentation, render throughput, upload cleanup, crashing on symlinks
Browse files Browse the repository at this point in the history

* Revert monocular video fineterrain/populate ordering

* Fix jobs getting stuck due to max_stuck_at_numdone settings

* Fix upload cleanup

* Fix crashing on symlinks

Bugfix tag segmentation, render throughput, upload cleanup, crashing on symlinks

* Revert monocular video fineterrain/populate ordering

* Fix jobs getting stuck due to max_stuck_at_numdone settings

* Fix upload cleanup

* Fix crashing on symlinks
  • Loading branch information
araistrick authored and pvl-bot committed Oct 15, 2023
1 parent b7b431f commit f1a27a9
Show file tree
Hide file tree
Showing 5 changed files with 53 additions and 30 deletions.
2 changes: 1 addition & 1 deletion worldgen/rendering/render.py
Original file line number Diff line number Diff line change
Expand Up @@ -326,7 +326,7 @@ def render_image(
saving_ground_truth=flat_shading
)

indices = dict(cam_rig=camera_rig_id, resample=0, subcam_id=subcam_id)
indices = dict(cam_rig=camera_rig_id, resample=0, subcam=subcam_id)

## Update output names
fileslot_suffix = get_suffix({'frame': "####", **indices})
Expand Down
10 changes: 5 additions & 5 deletions worldgen/tools/datarelease_toolkit.py
Original file line number Diff line number Diff line change
Expand Up @@ -312,14 +312,14 @@ def fix_missing_camviewdata(local_folder, dummy):
def reorganize_old_framesfolder(frames_old):

frames_old = Path(frames_old)
frames_dest = frames_old.parent/"frames"

for img_path in frames_old.iterdir():
for p in frames_old.iterdir():
if p.is_symlink():
p.unlink()

if img_path.name == 'assets':
img_path.unlink()
continue
frames_dest = frames_old.parent/"frames"

for img_path in frames_old.iterdir():
dtype, *_ = img_path.name.split('_')
idxs = parse_suffix(img_path.name)
new_path = frames_dest/dtype/f"camera_{idxs['subcam']}"/img_path.name
Expand Down
31 changes: 18 additions & 13 deletions worldgen/tools/manage_datagen_jobs.py
Original file line number Diff line number Diff line change
Expand Up @@ -487,16 +487,15 @@ def jobs_to_launch_next(
max_stuck_at_task: int = None
):

def inflight(s):
return s['num_running'] + s['num_done']

if greedy:
scenes = sorted(
copy(scenes),
key=lambda s: s['num_running'] + s['num_done'],
reverse=True
)
scenes = sorted(copy(scenes), key=inflight, reverse=True)

done_counts = np.array([s['num_done'] + s['num_running'] for s in scenes])
numdone_unique, curr_at_each_numdone = np.unique(done_counts, return_counts=True)
numdone_unique = list(numdone_unique)
inflight_counts = np.array([inflight(s) for s in scenes if s['all_done'] == SceneState.NotDone])
inflight_uniq, curr_per_inflight = np.unique(inflight_counts, return_counts=True)
inflight_uniq = list(inflight_uniq)

total_queued = sum(
v for (s, _), v in state_counts.items()
Expand All @@ -512,14 +511,20 @@ def jobs_to_launch_next(
if scene.get('any_fatal_crash', False):
continue

ndone_if_launch = scene['num_done'] + 1
inflight_if_launch = inflight(scene) + 1
stuck_at_next = (
curr_at_each_numdone[numdone_unique.index(ndone_if_launch)]
if ndone_if_launch in numdone_unique else 0
curr_per_inflight[inflight_uniq.index(inflight_if_launch)]
if inflight_if_launch in inflight_uniq else 0
)

if max_stuck_at_task is not None and stuck_at_next >= max_stuck_at_task:
logging.info(f"{seed} - Not launching due to {stuck_at_next=} > {max_stuck_at_task} for {ndone_if_launch=}")
if (
max_stuck_at_task is not None and
stuck_at_next >= max_stuck_at_task
):
logging.info(
f"{seed} - Not launching due to {stuck_at_next=} >"
f" {max_stuck_at_task} for {inflight_if_launch=}"
)
continue

for rec in iterate_scene_tasks(scene, args, monitor_all=False):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,11 @@ iterate_scene_tasks.cam_id_ranges = [1, 1]
iterate_scene_tasks.global_tasks = [
{'name': 'coarse', 'func': @queue_coarse},
{'name': "populate", 'func': @queue_populate},
{'name': 'backuppopulate', 'func': @renderbackup/queue_populate, 'condition': 'prev_failed'}
{'name': 'backuppopulate', 'func': @renderbackup/queue_populate, 'condition': 'prev_failed'},
{'name': "fineterrain", 'func': @queue_fine_terrain},
]
iterate_scene_tasks.view_dependent_tasks = [
{'name': "fineterrain", 'func': @queue_fine_terrain},
{'name': "fineterrain", 'func': @queue_fine_terrain},
]
iterate_scene_tasks.camera_dependent_tasks = [
{'name': 'shortrender', 'func': @rendershort/queue_render},
Expand Down
35 changes: 26 additions & 9 deletions worldgen/tools/util/upload_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
import platform
import tarfile
import json
import itertools

import time
from datetime import datetime
Expand All @@ -34,9 +35,9 @@
('fine*/*.csv', 'KEEP'),
('fine*/*.json', 'KEEP'),

('savemesh*/*', 'DELETE'),
('savemesh*', 'DELETE'),
('coarse/assets', 'DELETE'),
('coarse/scene.blend', 'DELETE'),
('coarse/scene.blend*', 'DELETE'),
('fine*/assets', 'DELETE'),
('tmp', 'DELETE'),
('*/*.b_displacement.npy', 'DELETE'),
Expand Down Expand Up @@ -76,7 +77,13 @@ def apply_manifest_cleanup(scene_folder, manifest):

for glob, action in manifest:

affected = scene_folder.glob(glob)
affected = set()
for p in scene_folder.glob(glob):
affected.add(p)
if p.is_dir():
affected |= set(p.rglob("*"))

print(f'{glob=} {action=} matched {len(affected)=}')

if action == 'KEEP':
keep |= affected
Expand All @@ -89,11 +96,18 @@ def apply_manifest_cleanup(scene_folder, manifest):
else:
raise ValueError(f'Unrecognized {action=}')

assert delete.isdisjoint(keep)

for f in delete:
f.unlink()
for p in scene_folder.rglob('*'):
if p.is_dir() and len(list(p.iterdir())) == 0:
p.rmdir()
if not f.exists():
continue
if f.is_symlink() or not f.is_dir():
f.unlink()
for f in delete:
if not f.exists() or not f.is_dir():
continue
if len([f1 for f1 in f.rglob('*') if not f.is_dir()]) == 0:
shutil.rmtree(f)

def rclone_upload_file(src_file, dst_folder):

Expand Down Expand Up @@ -150,7 +164,7 @@ def write_thumbnail(parent_folder, seed, all_images):

def create_tarball(parent_folder):
tar_path = parent_folder.with_suffix('.tar.gz')
print(f"Performing cleanup and tar to {tar_path}")
print(f"Tarring {parent_folder} to {tar_path}")
with tarfile.open(tar_path, "w:gz") as tar:
tar.add(parent_folder, os.path.sep)
assert tar_path.exists()
Expand Down Expand Up @@ -179,13 +193,16 @@ def upload_job_folder(
parent_folder = Path(parent_folder)
seed = parent_folder.name

print(f'Performing cleanup on {parent_folder}')
apply_manifest_cleanup(parent_folder, UPLOAD_MANIFEST)

upload_func = get_upload_func(method)

upload_dest_folder = get_upload_destfolder(parent_folder.parent)
if dir_prefix_len > 0:
upload_dest_folder = upload_dest_folder/parent_folder.name[:dir_prefix_len]

all_images = sorted(list(parent_folder.rglob("frames*/Image*.png")))
all_images = sorted(list(parent_folder.rglob("**/Image*.png")))

upload_paths = [
write_thumbnail(parent_folder, seed, all_images),
Expand Down

0 comments on commit f1a27a9

Please sign in to comment.