-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathSynImage.py
270 lines (236 loc) · 10.7 KB
/
SynImage.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
import numpy as np
import bpy
import ssi
from ssi.rotations import Spherical
from mathutils import Euler
from ssi import utils
import json
import math
import time
import os
import sys
import boto3
import shortuuid
import csv
from PIL import Image
from collections import defaultdict
import random
def createCSV(name, ds_name):
    """Write the label-to-RGB color legend CSV for one rendered image.

    Creates ``render/<ds_name>/labels_<name>0.csv`` holding the fixed
    mapping from segmentation label to mask RGB color.

    Args:
        name: unique short-uuid string identifying the image.
        ds_name: dataset folder name under ``render/``.
    """
    header = ['label', 'R', 'G', 'B']
    rows = [
        ['background', '0', '0', '0'],
        ['barrel', '206', '0', '0'],
        ['panel_right', '206', '206', '0'],
        ['panel_left', '0', '0', '206'],
        ['orbitrak_logo', '0', '206', '206'],
        ['cygnus_logo', '206', '0', '206']]
    csv_path = os.path.join("render", ds_name, "labels_" + str(name) + '0.csv')
    # newline='' is the documented way to open files for the csv module
    # (prevents blank rows on Windows); the with-block closes the file,
    # so the explicit f.close() the original had was redundant.
    with open(csv_path, 'w', newline='') as f:
        csv_writer = csv.writer(f)
        csv_writer.writerow(header)
        csv_writer.writerows(rows)
def load_image_into_numpy_array(image):
    """Convert a PIL image into an (H, W, 3) uint8 numpy array."""
    width, height = image.size
    rgb = image.convert('RGB')
    flat = np.array(rgb.getdata())
    # PIL reports size as (width, height); numpy arrays are (rows, cols).
    return flat.reshape((height, width, 3)).astype(np.uint8)
def load_images_from_paths(image_paths):
    """Open each image path with PIL and return a list of (H, W, 3) uint8 arrays."""
    return [load_image_into_numpy_array(Image.open(path)) for path in image_paths]
def deleteImage(name, ds_name):
    """Remove every file in render/<ds_name> whose filename contains *name*."""
    folder = os.path.join(os.getcwd(), 'render', ds_name)
    for entry in os.listdir(folder):
        if name in entry:
            os.remove(os.path.join(folder, entry))
    print('--------------------------------- DELETED IMAGE------------------------------')
def get_xy(name, ds_name):
    """Locate color-coded truth-marker centroids in the rendered truth image.

    Reads ``render/<ds_name>/truth_<name>0.png``, finds the centroid of each
    exact-color marker, and derives the barrel center from the barrel
    top/bottom markers.  If either barrel marker is missing, the whole image
    set for *name* is deleted via deleteImage().

    Returns:
        (centroids, deleted): dict mapping marker name -> (y, x) centroid
        (missing barrel markers stay as the int 0 sentinel), and a bool that
        is True when the image files were deleted.
    """
    # read in truth paths from the dataset for both dev and test sets
    truth_paths = [os.getcwd() + "/render/" + ds_name + '/truth_' + name + '0.png']
    truth_images = load_images_from_paths(truth_paths)
    # represented with BGR values. load these in from csv that maps object to color (e.g. left solar panel is always red dot)
    colors = {'barrel_top': [0, 0, 206], 'barrel_bottom': [0, 206, 73], 'panel_left':[206, 0, 206], 'panel_right': [0, 206, 206], 'orbitrak_logo': [206, 177, 0], 'cygnus_logo':[206, 0, 0]}
    #Back: Blue, Front: Grean, Right: Pink, Left: Cyan
    for im in truth_images:
        # 0 acts as a "marker not found" sentinel for the two barrel markers;
        # the zip() in the try-block below raises on it (see NOTE there).
        centroids = defaultdict()
        centroids['barrel_top'] = 0
        centroids['barrel_bottom'] = 0
        for color in colors:
            # pixel coordinates where the image exactly matches this marker color
            idxs = np.where(np.all(im == colors[color], axis=2))
            y,x = idxs
            if len(y) != 0 and len(x) != 0:
                # centroid represented as (y,x)
                centroid = (int(round(np.average(idxs[0]))), int(round(np.average(idxs[1]))))
                centroids[color] = centroid
    # store this centroid dictionary in the metadata file for the image under category 'truth_centroids'
    try:
        # zip() raises TypeError when either barrel marker is still the int 0
        # sentinel, which deliberately routes us into the delete branch.
        z = list(zip(centroids['barrel_top'], centroids['barrel_bottom']))
        avgCenter = (int(round(np.average(z[0]))), int(round(np.average(z[1]))))
        centroids['barrel_center'] = avgCenter
        deleted = False
    except:
        # NOTE(review): bare except — any failure here (not just a missing
        # marker) silently deletes the image; consider narrowing to TypeError.
        deleteImage(name, ds_name)
        deleted = True
    return centroids, deleted
#********************************************************************************************
############################################
#The following is the main code for image generation
############################################
def generate(ds_name, tags_list):
    """Render the synthetic image dataset into ``render/<ds_name>``.

    For every (background, pose, distance, offset) combination produced by
    ssi.Sequence.exhaustive, renders the real / mask / truth image triple
    through Blender, writes the label CSV, computes truth centroids, and
    dumps per-frame metadata JSON.  Quits Blender when finished.

    Must run inside Blender with the expected scene objects present
    ("Cygnus_Real", "Camera_Real", "Sun", etc.) — TODO confirm scene file.

    Args:
        ds_name: dataset folder name under ``render/``.
        tags_list: list of tag strings stored in each frame's metadata.
    """
    start_time = time.time()
    #poses = utils.random_rotations(20)
    #lightAngle = utils.random_rotations(5)
    positions = utils.cartesian([0], [1, 2], [3,4,5])
    #backgrounds = utils.random_rotations(5)
    offsets = utils.cartesian([0.3, 0.6], [0.5])
    # hand-picked base camera/object poses (Euler angles in radians)
    poses = [
        Euler((math.radians(-45.0), math.radians(-60.0), math.radians(-10)), 'XYZ'),
        Euler((math.radians(10), math.radians(-50), math.radians(0)), 'XYZ'),
        Euler((math.radians(-40), math.radians(-30), math.radians(40)), 'XYZ'),
        Euler((math.radians(-45), math.radians(-30), math.radians(-40)), 'XYZ'),
        Euler((math.radians(-45), math.radians(-60), math.radians(155)), 'XYZ'),
        Euler((math.radians(60), math.radians(-20), math.radians(-50)), 'XYZ'),
        Euler((math.radians(180), math.radians(-60), math.radians(-10)), 'XYZ'),
    ]
    backgrounds = [Euler((math.radians(0), math.radians(0), math.radians(0)), 'XYZ'),
        Euler((math.radians(60), math.radians(0), math.radians(-40)), 'XYZ'),
        Euler((math.radians(0), math.radians(-70), math.radians(0)), 'XYZ'),
        Euler((math.radians(-60), math.radians(-50), math.radians(40)), 'XYZ'),
        Euler((math.radians(0), math.radians(-50), math.radians(50)), 'XYZ'),
        Euler((math.radians(0), math.radians(-45), math.radians(0)), 'XYZ'),
        Euler((math.radians(0), math.radians(0), math.radians(80)), 'XYZ'),
        Euler((math.radians(100), math.radians(100), math.radians(100)), 'XYZ'),
    ]
    # augment each base pose with 7 x 3 random single-axis perturbations,
    # each at least 10 degrees and at most 40 degrees in magnitude
    poses_extend = []
    for pose in poses:
        poses_extend.append(pose)
        for _ in range(7):
            for i in range(0,3):
                new_pose = pose.copy()
                rand = random.randint(-40, 40)
                # re-roll until the perturbation is at least 10 degrees
                while abs(rand) < 10:
                    rand = random.randint(-40, 40)
                new_pose[i] += math.radians(rand)
                poses_extend.append(new_pose)
    seq = ssi.Sequence.exhaustive(
        background = backgrounds,
        pose = poses_extend,
        distance = [30, 35, 40],
        #position = positions
        #lighting = lightAngle,
        offset = offsets
    )
    #check if folder exists in render, if not, create folder
    try:
        os.mkdir("render/" + ds_name)
    except Exception:
        pass
    data_storage_path = os.getcwd() + "/render/" + ds_name
    #setting file output stuff
    output_node = bpy.data.scenes["Render"].node_tree.nodes["File Output"]
    output_node.base_path = data_storage_path
    #set black background
    #bpy.context.scene.world.color = (0,0,0)
    #remove all animation
    for obj in bpy.context.scene.objects:
        obj.animation_data_clear()
    image_num = 0
    # restrict uuid alphabet so generated names are filesystem-friendly
    shortuuid.set_alphabet('12345678abcdefghijklmnopqrstwxyz')
    for i, frame in enumerate(seq):
        # apply this frame's transform to all three render pipelines
        # (real image, segmentation mask, truth markers)
        frame.setup(bpy.data.objects["Cygnus_Real"], bpy.data.objects["Camera_Real"], bpy.data.objects["Sun"])
        frame.setup(bpy.data.objects["Cygnus_MaskID"], bpy.data.objects["Camera_MaskID"], bpy.data.objects["Sun"])
        frame.setup(bpy.data.objects["Truth_Data"], bpy.data.objects["Camera_Truth"], bpy.data.objects["Sun"])
        bpy.context.scene.frame_set(0)
        #create name for the current image (unique to that image)
        name = shortuuid.uuid()
        # the trailing "#" is replaced by the frame number (0) by Blender
        output_node.file_slots[0].path = "image_" + str(name) + "#"
        output_node.file_slots[1].path = "mask_" + str(name) + "#"
        output_node.file_slots[2].path = "truth_" + str(name) + "#"
        createCSV(name, ds_name)
        image_num = i + 1
        # render
        bpy.ops.render.render(scene="Render")
        #add centroid truth data to json files
        frame.truth_centroids, deleted = get_xy(name, ds_name)
        #Tag the pictures
        frame.tags = tags_list
        # add metadata to frame
        frame.sequence_name = ds_name
        # dump data to json (skipped when get_xy deleted a bad image)
        if not deleted:
            with open(os.path.join(output_node.base_path, "meta_" + str(name) + "0.json"), "w") as f:
                f.write(frame.dumps())
        print("===========================================" + "\r")
    time_taken = time.time() - start_time
    print("------Time Taken: %s seconds----------" %(time_taken) + "\r")
    print("Number of images generated: " + str(image_num) + "\r")
    # 5 files per image: real, mask, truth, labels csv, metadata json
    print("Total number of files: " + str(image_num * 5) + "\r")
    print("Average time per image: " + str(time_taken / image_num))
    print("Data stored at: " + data_storage_path)
    bpy.ops.wm.quit_blender()
############################
#The following is the main code for upload
############################
def upload(ds_name, bucket_name):
    """Upload the rendered dataset files to an S3 bucket.

    Uploads every non-hidden, non-truth file in ``render/<ds_name>`` to
    ``s3://<bucket_name>/<ds_name>/<file>``.  Exits the process if the
    local dataset folder does not exist.

    Args:
        ds_name: dataset folder name under ``render/``.
        bucket_name: name of an existing S3 bucket.
    """
    print("\n\n______________STARTING UPLOAD_________")
    # Create an S3 client
    s3 = boto3.client('s3')
    print("...beginning upload to %s..." % bucket_name)
    src_dir = os.path.join(os.getcwd(), "render", ds_name)
    try:
        files = next(os.walk(src_dir))[2]
    except Exception:
        print("...No data set named " + ds_name + " found in starfish/render. Please generate images with that folder name or move existing folder into render folder")
        # exit() is meant for interactive sessions; sys.exit() is the
        # supported way to terminate a script.
        sys.exit()
    #count number of files
    num_files = 0
    # For every file in directory
    for file in files:
        # skip hidden files and the truth-marker renders (not uploaded)
        if not file.startswith('.') and not file.startswith('truth'):
            #upload to s3
            print("uploading...")
            # ANSI "cursor up" so the progress line overwrites itself
            sys.stdout.write("\033[F")
            local_file = os.path.join(src_dir, file)
            s3.upload_file(local_file, bucket_name, ds_name + "/" + file)
            num_files = num_files + 1
    print("...finished uploading...%d files uploaded..." % num_files)
def validate_bucket_name(bucket_name):
    """Return True if *bucket_name* is an existing S3 bucket, else False.

    Prints a status message either way so the interactive caller can
    re-prompt on failure.
    """
    s3t = boto3.resource('s3')
    # A bucket with no creation date does not exist — this check avoids a
    # head_bucket call and the exception handling it would require.
    if s3t.Bucket(bucket_name).creation_date is None:
        print("...Bucket does not exist, enter a valid bucket name...")
        return False
    print("...bucket exists....")
    return True
#############################################
#Run user input data then run generation/upload
#############################################
def main():
    """Interactive entry point: prompt the user, then generate and/or upload.

    Ensures the local ``render`` directory exists, asks whether to generate
    images and/or upload to AWS, validates the S3 bucket name (re-prompting
    until it exists), collects the dataset folder name and tags, and
    dispatches to generate() and upload().
    """
    # ensure the render output root exists; ignore "already exists"
    try:
        os.mkdir("render")
    except Exception:
        pass
    yes = {'yes', 'y', 'Y'}
    runGen = input("*> Generate images?[y/n]: ")
    runUpload = input("*> Would you like to upload these images to AWS? [y/n]: ")
    bucket_name = None  # only set when the user opts into uploading
    if runUpload in yes:
        bucket_name = input("*> Enter Bucket name: ")
        #check if bucket name valid
        while not validate_bucket_name(bucket_name):
            bucket_name = input("*> Enter Bucket name: ")
        print(" Note: if you want to upload to AWS but not generate images, move folder with images to 'render' and enter folder name. If the folder name exists, images will be stored in that directory")
    dataset_name = input("*> Enter name for folder: ")
    print(" Note: rendered images will be stored in a directory called 'render' in the same local directory this script is located under the directory name you specify.")
    tags = input("*> Enter tags for the batch separated with space: ")
    tags_list = tags.split()
    if runGen in yes:
        generate(dataset_name, tags_list)
    if runUpload in yes:
        upload(dataset_name, bucket_name)
    print("______________DONE EXECUTING______________")

if __name__ == "__main__":
    main()