
Commit

Initial commit
simonrw committed Jun 8, 2015
0 parents commit eb49634
Showing 5 changed files with 342 additions and 0 deletions.
1 change: 1 addition & 0 deletions README.md
@@ -0,0 +1 @@
# Pipeline reduction
64 changes: 64 additions & 0 deletions bin/pipebias.py
@@ -0,0 +1,64 @@
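# pipebias.py -- build a master bias frame.
# Usage (from the sys.argv parsing below):
#     python pipebias.py <inlist> <biasname> <outdir>
# The frames listed in <inlist> are split into groups of 50; each group is
# overscan-subtracted and median-combined, and the group medians are then
# averaged into <outdir>/<biasname>.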
import math
import sys
import os
import numpy as np
from extract_overscan import extract_overscan
from pipeutils import open_fits_file
from astropy.io import fits

inlist = str(sys.argv[1])
biasname = str(sys.argv[2])
outdir = str(sys.argv[3])+'/'

os.system('rm -f '+outdir+'bsorted*')

def biasmaker():
    os.system('mkdir '+outdir)
    position = 0
    i = 1
    for line in file(inlist):
        fname = outdir+'bsorted'+"{0:03d}".format(position)
        f = open(fname, 'a')
        f.write(line)
        f.close()
        if i == 50:
            i = 0
            position += 1
        i += 1

    os.system('ls '+outdir+'bsorted* >removeindexlist.dat')

    i = 1
    mastermatrix = []

    for line in file('removeindexlist.dat'):

        datamatrix = []
        call = line.strip('\n')
        for line in file(call):
            line = line.strip()
            with open_fits_file(line) as hdulist:
                overscan = extract_overscan(hdulist)
                data = hdulist[0].data[0:2048,20:2068]
                corrected = data-np.median(overscan)
                datamatrix.append(corrected)
        print 'medianing'
        print np.shape(datamatrix)
        master = np.median(datamatrix, axis=0)
        print i
        mastermatrix.append(master)
        i += 1

    print 'averaging'
    print np.shape(mastermatrix)
    bias = np.mean(mastermatrix, axis=0)

    phdu = fits.PrimaryHDU(bias)
    outname = outdir+biasname
    command = 'rm -f '+outname
    os.system(command)
    phdu.writeto(outname)

    os.system('rm -f '+outdir+'bsorted* removeindexlist.dat')
biasmaker()
73 changes: 73 additions & 0 deletions bin/pipedark.py
@@ -0,0 +1,73 @@
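# pipedark.py -- build a master dark frame.
# Usage (from the sys.argv parsing below):
#     python pipedark.py <inlist> <biasname> <darkname> <outdir>
# The master bias is read from <outdir>/<biasname>. Each dark frame is
# overscan- and bias-subtracted and divided by its exposure time; frames are
# median-combined in groups of 50 and the group medians are averaged into
# <outdir>/<darkname>.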
import math
import sys
import os
import numpy as np
from astropy.io import fits as pyfits
from extract_overscan import extract_overscan
from pipeutils import open_fits_file

inlist = str(sys.argv[1])
biasname = str(sys.argv[2])
darkname = str(sys.argv[3])
outdir = str(sys.argv[4])+'/'

biasname = outdir+biasname

os.system('rm -f '+outdir+'dsorted*')

def darkmaker():


    with open_fits_file(biasname) as hdulist:
        bias = hdulist[0].data
    position = 0
    i = 1
    for line in file(inlist):
        fname = outdir+'dsorted'+"{0:03d}".format(position)
        f = open(fname, 'a')
        f.write(line)
        f.close()
        if i == 50:
            i = 0
            position += 1
        i += 1

    os.system('ls '+outdir+'dsorted* >removeindexlist.dat')

    i = 1
    mastermatrix = []

    for line in file('removeindexlist.dat'):

        datamatrix = []
        call = line.strip('\n')
        for line in file(call):
            line = line.strip()
            with open_fits_file(line) as hdulist:
                overscan = extract_overscan(hdulist)
                data = hdulist[0].data[0:2048,20:2068]
                exposure = hdulist[0].header['exposure']
                corrected = (data-np.median(overscan)-bias)/exposure
                datamatrix.append(corrected)
        print np.shape(datamatrix)
        master = np.median(datamatrix, axis=0)
        print i
        mastermatrix.append(master)
        i += 1

    print 'averaging'
    print np.shape(mastermatrix)
    dark = np.mean(mastermatrix, axis=0)

    phdu = pyfits.PrimaryHDU(dark)

    outname = outdir+darkname
    command = 'rm -f '+outname
    os.system(command)
    phdu.writeto(outname)

    os.system('rm -f removeindexlist.dat '+outdir+'dsorted*')


darkmaker()
136 changes: 136 additions & 0 deletions bin/pipeflat.py
@@ -0,0 +1,136 @@
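# pipeflat.py -- build a master flat field.
# Usage (from the sys.argv parsing below):
#     python pipeflat.py <inlist> <biasname> <darkname> <smname> <flatname> <outdir>
# Frames with an exposure time below 3 or a median level of 40000 or more are
# skipped; the remainder are overscan-, bias- and dark-corrected, normalised by
# their mean and median-combined into <outdir>/<flatname>. Per-frame statistics
# (datafile.dat), standard-deviation and variance maps, and a summed
# flat_total.fits image are also written. <smname> is read but is only used in
# a commented-out correction.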
from __future__ import print_function

import math
import sys
import os
import numpy as np
from astropy.io import fits as pyfits
from extract_overscan import extract_overscan
from pipeutils import open_fits_file

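# Write an image to fname, recording the number of contributing files in the header.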
def render_total_file(data, fname, nfiles):
    hdu = pyfits.PrimaryHDU(data)
    hdu.header.set('nfiles', nfiles)
    hdu.writeto(fname)


inlist = str(sys.argv[1])
biasname = str(sys.argv[2])
darkname = str(sys.argv[3])
smname = str(sys.argv[4])
flatname = str(sys.argv[5])
outdir = str(sys.argv[6])+'/'

biasname = outdir+biasname
darkname = outdir+darkname
smname = outdir+os.path.basename(smname)
totalname = outdir+'flat_total.fits'
def reducer():
    os.system('mkdir '+outdir+'flats')
    with open_fits_file(biasname) as hdulist:
        bias = hdulist[0].data
    with open_fits_file(darkname) as hdulist:
        dark = hdulist[0].data
    with open_fits_file(smname) as hdulist:
        sm = hdulist[0].data
    os.system('rm -f '+outdir+'datafile.dat')
    os.system('rm -f '+outdir+'variance.fits')
    os.system('rm -f '+outdir+flatname)
    os.system('rm -f '+outdir+'std.fits')
    os.system('rm -f '+outdir+'expdata.dat')
    frameno = 1

    nflat_files = 0
    flat_total = np.zeros(dark.shape)
    datamatrix = []
    expfile = outdir+'expdata.dat'
    for line in file(inlist):
        stripped = line.strip()
        with open_fits_file(stripped) as hdulist:
            header = hdulist[0].header
            overscan = extract_overscan(hdulist)
            data = hdulist[0].data[0:2048,20:2068]
            exposure = header['exposure']
            mjd = header['mjd']
            median_data = np.median(data[:, 20:-20])

            f = open(expfile, 'a')
            f.write(str(exposure)+'\n')
            f.close()

            to_include = (exposure >= 3) & (median_data < 40000)
            if not to_include:
                print("Skipping file {fname}, exptime={exptime}, med_data={med}"
                      .format(fname=stripped, exptime=exposure, med=median_data),
                      file=sys.stderr)
            else:

                corrected1 = (data-np.median(overscan)-bias-(dark*exposure))
                flat_total += corrected1
                nflat_files += 1

                # corrected2 = corrected1/(1-(sm/exposure))

                fmean = np.mean(corrected1)
                fstd = np.std(corrected1)

                normalised = corrected1/fmean
                # normalised = corrected1
                path, fname = os.path.split(stripped)
                outname = outdir+'flats/'+'proc'+fname.replace('.bz2', '')
                dfile = outdir+'datafile.dat'
                f = open(dfile, 'a')
                f.write(str(frameno)+" "+str(fmean)+" "+str(fstd)+" "+str(exposure)+" "+outname+'\n')
                f.close()

                datamatrix.append(normalised)

                phdu = pyfits.PrimaryHDU(normalised)
                phdu.header['exposure'] = exposure
                phdu.header['mjd'] = mjd
                command = 'rm -f '+outname
                os.system(command)
                phdu.writeto(outname)
                tfile = outdir+'processed.dat'
                f = open(tfile, 'a')
                f.write(outname+'\n')
                f.close()

        frameno += 1

    try:
        frame, means, stds = np.loadtxt(dfile, usecols=(0, 1, 2), unpack=True)
    except UnboundLocalError as err:
        if 'dfile' in str(err):
            raise RuntimeError("All flats invalid. Pipeline cannot continue"
                               ", original error: {}".format(str(err)))

    wholestd = np.std(datamatrix, axis=0)

    print(np.size(wholestd))

    outname = outdir+'std.fits'
    pyfits.PrimaryHDU(wholestd).writeto(outname)
    print('std done')
    variance = 1/(wholestd*wholestd)

    outname = outdir+'variance.fits'
    pyfits.PrimaryHDU(variance).writeto(outname)
    print('var done')
    flat = np.median(datamatrix, axis=0)

    outname = outdir+flatname
    pyfits.PrimaryHDU(flat).writeto(outname)
    print('flat done')

    render_total_file(flat_total, totalname, nflat_files)

if __name__ == '__main__':
    reducer()
68 changes: 68 additions & 0 deletions bin/pipered.py
@@ -0,0 +1,68 @@
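# pipered.py -- apply the master calibration frames to a list of science images.
# Usage (from the sys.argv parsing below; sys.argv[4] is not used):
#     python pipered.py <inlist> <biasname> <darkname> <unused> <flatname> <caldir> <outdir>
# Each frame is overscan-, bias- and dark-corrected and divided by the flat,
# then written to <outdir> with HISTORY keywords recording the calibration
# files used. Frames are processed in parallel with a thread pool.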
import math
import sys
import os
import numpy as np
from astropy.io import fits as pyfits
from extract_overscan import extract_overscan
from pipeutils import open_fits_file
from functools import partial
from multiprocessing.dummy import Pool as ThreadPool

inlist = str(sys.argv[1])
biasname = str(sys.argv[2])
darkname = str(sys.argv[3])
flatname = str(sys.argv[5])
caldir = str(sys.argv[6])+'/'
outdir = str(sys.argv[7])+'/'
biasname = caldir+biasname
darkname = caldir+darkname
flatname = caldir+flatname

os.system('rm -f '+outdir+'processed.dat')


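# Calibrate a single frame and append the output filename to processed.dat.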
def reduce_file(filename, bias, dark, flat):
    with open_fits_file(filename) as hdulist:
        overscan = extract_overscan(hdulist)
        data = hdulist[0].data[0:2048,20:2068]
        exposure = hdulist[0].header['exposure']
        corrected = (data-np.median(overscan)-bias-(dark*exposure))/flat
        path, fname = os.path.split(filename)
        outname = outdir+'proc'+fname.replace('.bz2', '')
        print outname
        hdulist[0].data = corrected
        hdulist[0].header.add_history('Overscan of '+str(np.median(overscan))+' subtracted')
        hdulist[0].header.add_history('Bias subtracted using '+str(biasname))
        hdulist[0].header.add_history('Dark subtracted using '+str(darkname))
        hdulist[0].header.add_history('Flat corrected using '+str(flatname))

        command = 'rm -f '+outname
        os.system(command)
        hdulist.writeto(outname)
        dfile = outdir+'processed.dat'
        f = open(dfile, 'a')
        f.write(outname+'\n')
        f.close()




def reducer():
    with open_fits_file(biasname) as hdulist:
        bias = hdulist[0].data
    with open_fits_file(darkname) as hdulist:
        dark = hdulist[0].data
    with open_fits_file(flatname) as hdulist:
        flat = hdulist[0].data

    pool = ThreadPool()
    fn = partial(reduce_file, bias=bias, dark=dark, flat=flat)
    with open(inlist) as infile:
        filenames = [line.strip() for line in infile]

    pool.map(fn, filenames)



if __name__ == '__main__':
    reducer()
