#!/usr/bin/python3
r'''Evaluate observed point distribution from stationary observations

Synopsis:

  $ observe-pixel-uncertainty '*.png'
  Evaluated 49 observations
  mean 1-sigma for independent x,y: 0.26

  $ mrcal-calibrate-cameras --observed-pixel-uncertainty 0.26 .....
  [ mrcal computes a camera calibration ]

mrgingham has finite precision, so repeated observations of the same board will
produce slightly different corner coordinates. This tool takes in a set of
images (assumed to observe a chessboard, with both the camera and the board
stationary). It then outputs the 1-standard-deviation statistic for the
distribution of detected corners. This can then be passed in to mrcal:
'mrcal-calibrate-cameras --observed-pixel-uncertainty ...'

The distribution of the detected corners is assumed to be gaussian, and
INDEPENDENT in the horizontal and vertical directions. If the x and y
distributions each have standard deviation s, then the LENGTH of the deviation
of each pixel follows a Rayleigh distribution with expected value
s*sqrt(pi/2) ~ s*1.25

THIS TOOL PERFORMS VERY LIGHT OUTLIER REJECTION; IT IS ASSUMED THAT THE SCENE
IS STATIONARY

'''
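
# For intuition (illustrative arithmetic, not computed by this tool): with the
# synopsis value s = 0.26 pixels per axis, the expected radial deviation is
#
#   s*sqrt(pi/2) = 0.26 * 1.2533... ~ 0.33 pixels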
from __future__ import print_function
import argparse
import re
import sys
def parse_args():

    parser = \
        argparse.ArgumentParser(description = __doc__,
                                formatter_class=argparse.RawDescriptionHelpFormatter)

    parser.add_argument('--show',
                        required=False,
                        choices = ('geometry', 'histograms'),
                        help='''Visualize something. Arguments can be: "geometry": show the 1-stdev ellipses
                        of the distribution for each chessboard corner
                        separately. "histograms": show the distribution of all
                        the x- and y-deviations off the mean''')
    parser.add_argument('--mrgingham',
                        type=str,
                        default='',
                        help='''If we're processing images, these are the arguments given to
                        mrgingham. If we are reading a pre-computed file, this
                        does nothing''')
    parser.add_argument('--num-corners',
                        type=int,
                        default=100,
                        help='''How many corners to expect in each image. If this is wrong I will throw an
                        error. Defaults to 100''')
    parser.add_argument('--imagersize',
                        type = int,
                        nargs = 2,
                        help='''Optional imager dimensions: width and height. If given, we use these to size
                        the "--show geometry" plot''')
    parser.add_argument('input',
                        type=str,
                        help='''Either 1: A glob that matches images observing a stationary calibration
                        target. This must be a GLOB. So in the shell pass in
                        '*.png' and NOT *.png. These are processed by
                        'mrgingham' and the arguments passed in with
                        --mrgingham. Or 2: a vnlog representing corner
                        detections from these images. This is assumed to be a
                        file with a filename ending in .vnl, formatted like
                        'mrgingham' output: 3 columns: filename,x,y''')

    return parser.parse_args()
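
# Illustrative invocations (the filenames are hypothetical; the options are the
# ones defined above):
#
#   mrgingham-observe-pixel-uncertainty --num-corners 100 '*.png'
#   mrgingham-observe-pixel-uncertainty --show histograms corners.vnl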
args = parse_args()
# I do these after the arg-parsing so that --help can work without all of this
# being available. That makes the manpage generation work better
import numpy as np
import numpysane as nps
import os
import vnlog
corners_output_process = None

if re.match(r'.*\.vnl$', args.input):
    # Pre-computed corner detections: read the vnlog file directly
    pipe_corners_read = open(args.input, 'r')
else:
    # A glob of images: run mrgingham on them, and read its output
    import subprocess
    args_mrgingham = 'mrgingham ' + args.mrgingham + ' ' + args.input
    sys.stderr.write("Computing chessboard corners by running:\n  {}\n". \
                     format(args_mrgingham))
    corners_output_process = subprocess.Popen(args_mrgingham,
                                              stdout = subprocess.PIPE,
                                              stderr = subprocess.PIPE,
                                              shell  = True,
                                              # decode the child's output to text so the
                                              # vnlog parser below sees strings, not bytes
                                              encoding = 'ascii')
    pipe_corners_read = corners_output_process.stdout
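
# Either way, pipe_corners_read now yields vnlog text: a '#'-prefixed header
# naming the columns, then one whitespace-separated record per line. An
# illustrative (made-up) stream:
#
#   # filename x y
#   frame00.png 123.45 67.89
#   frame00.png 234.56 78.90
#
# mrgingham may emit additional columns (such as a decimation level). Failed
# detections appear as '-' fields, which the vnlog parser reports as None;
# those records are skipped below.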
# shape (Nobservations,num_corners,2)
points = np.zeros((0, args.num_corners, 2))
# shape (num_corners,2)
points_here = np.zeros((args.num_corners, 2))
ipt = 0
path = None
vnlparser = vnlog.vnlog()
def finish_image(path_new):
    global points, points_here, path, ipt

    if path is not None:
        if ipt != args.num_corners:
            raise Exception(f"Unexpected num_points in image {path}. Expected {args.num_corners}, but got {ipt}")
        try:
            points = nps.glue(points, points_here, axis=-3)
        except:
            raise Exception("I assume that all image observations have the same number of points.\n" + \
                            "So far had {} points per image, but image '{}' has {} points per image". \
                            format(points.shape[-2], path, points_here.shape[-2]))

    path = path_new
    ipt  = 0
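
# Stream the corner records. Whenever the 'filename' column changes we close
# out the previous image with finish_image(); the trailing call with an empty
# sentinel path flushes the last image once the stream ends.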
for l in pipe_corners_read:
    vnlparser.parse(l)
    d = vnlparser.values_dict()
    if not d or d['x'] is None:
        continue

    if path != d['filename']:
        finish_image(d['filename'])

    # should look at decimation levels
    points_here[ipt][0] = float(d['x'])
    points_here[ipt][1] = float(d['y'])
    ipt += 1
finish_image('')

if corners_output_process:
    sys.stderr.write("Done computing chessboard corners\n")
    if corners_output_process.wait() != 0:
        err = corners_output_process.stderr.read()
        raise Exception("mrgingham failed: {}".format(err))

if len(points) == 0:
    print("Received no target observations")
    sys.exit(1)
@nps.broadcast_define( (('n','n'),), (5,) )
def ellipse_stats(M):
    l,v = np.linalg.eig(M)
    l = np.sqrt(l)

    if l[0] > l[1]:
        # ...0 is the major axis
        # ...1 is the minor axis
        r0 = l[0]
        r1 = l[1]
        v0 = v[:,0]
        v1 = v[:,1]
    else:
        # ...0 is the minor axis
        # ...1 is the major axis
        r1 = l[0]
        r0 = l[1]
        v1 = v[:,0]
        v0 = v[:,1]

    # angle between x axis and major axis
    th = np.arctan2(v0[1], v0[0])

    rx,ry = np.sqrt(M[0,0]),np.sqrt(M[1,1])
    return np.array((r0,r1,rx,ry,th))
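
# ellipse_stats() maps a 2x2 covariance matrix to (major radius, minor radius,
# x-stdev, y-stdev, major-axis angle). A quick sanity check (illustrative, not
# executed here): for a diagonal covariance np.diag((4., 1.)) it returns radii
# (2, 1), per-axis stdevs (2, 1), and an angle of 0, since the major axis lies
# along x.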
points_mean = np.mean(points, axis=0)
points_centered = points - points_mean
all_dxy = nps.clump(points_centered, n=2) # shape: (N,2)
sigma_dxy = np.std(all_dxy,axis=-2)
# I throw out outliers before reporting the statistics. I do it in discrete
# points: any point that has either an outliery x or an outliery y is thrown out
idx_in = np.max(np.abs(all_dxy) - 4.0*sigma_dxy, axis=-1) < 0.0
all_dxy = all_dxy[idx_in, :]
all_dxy -= np.mean(all_dxy, axis=-2)
sigma_dxy = np.std(all_dxy,axis=-2)
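
# With gaussian residuals, |deviation| > 4*sigma happens with probability
# ~6e-5 per axis, so the gate above drops only gross outliers (a corner the
# detector latched onto incorrectly, say) and barely affects the statistics of
# a genuinely stationary scene.
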
title = "Have {} observations, separate x,y stdev: ({:.2f},{:.2f}), joint x,y stdev: {:.2f}". \
format(points.shape[0],
np.std(all_dxy[:,0]),
np.std(all_dxy[:,1]),
np.std(all_dxy.ravel()))
print(title)
if not args.show:
    sys.exit(0)
import gnuplotlib as gp
if args.show == 'geometry':

    # Per-corner covariance of the detections, averaged over all observations.
    # Shape (num_corners, 2,2)
    C = np.mean( nps.outer(points_centered, points_centered), axis=0 )
    rad_major,rad_minor,rad_x,rad_y,angle = nps.transpose(ellipse_stats(C))

    _xrange = None
    _yrange = None
    _yinv   = None
    if args.imagersize is not None:
        _xrange = [0, args.imagersize[0]-1]
        _yrange = [args.imagersize[1]-1, 0]
    else:
        _yinv = True
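
    # Descriptive note: the gnuplot 'ellipses' style (as I understand it) takes
    # (x, y, major diameter, minor diameter, angle in degrees) with tuplesize 5,
    # or (x, y, x-diameter, y-diameter) with tuplesize 4 for axis-aligned
    # ellipses; hence the 1-sigma radii are doubled below.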
    gp.plot((points_mean[:,0], points_mean[:,1], 2*rad_major, 2*rad_minor, angle*180.0/np.pi,
             dict(_with = 'ellipses', tuplesize=5, _legend='1-sigma: dependent x,y')),
            (points_mean[:,0], points_mean[:,1], 2*rad_x, 2*rad_y,
             dict(_with = 'ellipses', tuplesize=4, _legend='1-sigma: independent x,y')),
            (points[...,0].ravel(), points[...,1].ravel(),
             dict(_with = 'points')),
            square  = 1,
            _xrange = _xrange,
            _yrange = _yrange,
            _yinv   = _yinv,
            wait    = 1)
elif args.show == 'histograms':

    var_xy   = np.var(all_dxy, axis=-2)
    binwidth = 0.02

    from scipy.special import erf
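
    # The overlaid gaussian curves are scaled to match histogram COUNTS rather
    # than probability density: the prefactor k makes each curve equal
    # N*erf(binwidth/(2*sqrt(2)*sigma)) * exp(-x^2/(2*sigma^2)), which is the
    # expected number of samples in the central bin at x=0 and, for this small
    # binwidth, approximately the expected count in the bin at any x.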
    equations = [ '{k}*exp(-(x)*(x)/(2.*{var})) / sqrt(2.*pi*{var}) title "{what}-distribution: gaussian fit" with lines lw 2'. \
                  format(what = 'x' if i==0 else 'y',
                         var  = var_xy[i],
                         k    = all_dxy.shape[-2]*erf(binwidth/(2.*np.sqrt(2)*np.sqrt(var_xy[i]))) * np.sqrt(2.*np.pi*var_xy[i])) \
                  for i in range(2) ]

    gp.plot( nps.transpose(all_dxy),
             _legend   = np.array(('x-distribution: observed', 'y-distribution: observed')),
             histogram = 1,
             binwidth  = binwidth,
             _with     = np.array(('boxes fill solid border lt -1', 'boxes fill transparent pattern 1', )),
             equation_above = equations,
             title = title,
             wait  = 1)