%pylab inline
# Make default figure size bigger
matplotlib.rcParams['savefig.dpi'] = 150
matplotlib.rcParams['figure.figsize'] = (4,3)
Populating the interactive namespace from numpy and matplotlib
13 Feb 2015
Brian Harding
# This is a copy of part of FPIprocess.py
#
# TODO: fold in the "Zenith reference" changes to this notebook.
# Right now this code only reflects "Laser reference"
######### Fill this in #############
instr_name = 'minime05'
year = 2013
doy = 7
####################################
import re
import FPI
import glob
from lmfit import Parameters
import logging
from optparse import OptionParser
import MySQLdb as mdb
import datetime
import numpy as np
import os
import BoltwoodSensor
import X300Sensor
import FPIDisplay
import pytz
import datetime
import fpiinfo
reload(fpiinfo)
reload(FPIDisplay)
reload(FPI)
direc_tol = 3.0
fpi_dir = '/rdata/airglow/fpi/'
nominal_dt = datetime.datetime(year,1,1) + datetime.timedelta(days = doy-1)
site_name = fpiinfo.get_site_of(instr_name, nominal_dt)
if site_name is None:
raise Exception('Instrument %s not registered on this date' % instr_name)
site = fpiinfo.get_site_info(site_name)
# Import the instrument information
instrument = fpiinfo.get_instr_info(instr_name, nominal_dt)
data_stub = fpi_dir + instr_name + '/' + site_name + '/'
# Create "minime05_uao_20130729" string
datestr = nominal_dt.strftime('%Y%m%d')
instrsitedate = instr_name + '_' + site_name + '_' + datestr
# Determine the times between which files will be accepted.
# Use local-time noon on doy to local-time noon on doy+1
local = pytz.timezone(site['Timezone'])
start_dt = datetime.datetime(year,1,1) + datetime.timedelta(days = doy-1, hours = 12)
stop_dt = datetime.datetime(year,1,1) + datetime.timedelta(days = doy-1, hours = 36)
start_dt = local.localize(start_dt)
stop_dt = local.localize(stop_dt)
# Create a list of relevant filenames by searching in adjacent
# days' directories also, since we might have problems with UT/LT,
# time zones, etc.
laser_fns = []
sky_fns = []
for day_offset in range(-2,3):
dir_dt = nominal_dt + datetime.timedelta(days = day_offset)
# Create the YYYYMMDD date format
yearstr = dir_dt.strftime('%Y')
datestr = dir_dt.strftime('%Y%m%d')
# Create the directory name for the data to be processed
data_dir = data_stub + yearstr + '/' + datestr + '/'
# Look in the data directory, and grab the files if they were
# taken between the start and stop times.
# First, lasers:
fns_all = glob.glob(data_dir + '*L*.img')
for fn in fns_all:
d = FPI.ReadIMG(fn)
dtime = local.localize(d.info['LocalTime'])
if dtime > start_dt and dtime < stop_dt:
laser_fns.append(fn)
# Second, sky:
fns_all = glob.glob(data_dir + '*X*.img')
for fn in fns_all:
d = FPI.ReadIMG(fn)
dtime = local.localize(d.info['LocalTime'])
if dtime > start_dt and dtime < stop_dt:
sky_fns.append(fn)
#print fn
laser_fns.sort()
sky_fns.sort()
if not laser_fns and not sky_fns:
raise Exception('No %s data found between %s and %s. \n' % (instr_name, str(start_dt), str(stop_dt)))
if not laser_fns and sky_fns:
raise Exception('No %s laser data found between %s and %s. Sky data found.\n' % \
(instr_name, str(start_dt), str(stop_dt)))
# Create FPI.ParameterFit arguments
N = instrument['N']
N0 = instrument['N0']
N1 = instrument['N1']
logfile = None
horizon_cutoff = '-6:0'
datestr = nominal_dt.strftime('%Y%m%d')
print instrsitedate
# This is a copy of the beginning of FPI.ParameterFit()
import subprocess
import re
import Image
from math import pi, floor, sqrt
import numpy as np
import peakdetect as pd
from lmfit import Minimizer, Parameters, report_errors, minimize
import scipy
from scipy import interpolate
import glob as glob
import time
import matplotlib.pyplot as pyplot
from matplotlib.mlab import prctile
from scipy import ndimage
import mahotas
import pytz
from pytz import timezone
import datetime
from scipy import ndimage
import mahotas
import matplotlib.dates as dates
import ephem
# For compatibility with running outside FPI.py
from FPI import *
center_variation_max = 3. # pixels, variation throughout night. Above this, outliers > 1 stdev will be ignored.
# Set the threshold below which the solved-for lamc
# will be used to center the grid search for the next iteration
SIGMA_V_THRESH = 30.
MOON_THRESH = 37. # deg. Don't use samples that are closer than this angle to the moon.
# Whether to estimate blur parameters. This was found necessary
# because the Morocco FPI appears to have something wrong with it, and
# there are fewer artifacts in the estimated temperature if this is set
# to False.
ESTIMATE_BLUR = True
if instrument['Abbreviation'] == 'minime03':
ESTIMATE_BLUR = False
LAS_I_THRESH = 25. # counts. If the laser intensity is less than this, ignore it.
print len(sky_fns)
minime05_uao_20130107
148
center_variation_max = 10.0
lasers_all = laser_fns[2:]
sky_all = sky_fns[:]
lasers_all.sort()
sky_all.sort()
if len(lasers_all)==0:
raise Exception('No Laser images found in %s' % data_dir)
# Only use images during sundown conditions.
local = pytz.timezone(site['Timezone']) # pytz timezone of the site
sun = ephem.Sun()
obs = ephem.Observer()
obs.lat = str(site['Location'][0])
obs.lon = str(site['Location'][1])
obs.elevation = site['Location'][2]
obs.pressure = 0.
obs.horizon = horizon_cutoff
# Read the first file, subtract 12 hours, then ask for next sunset time
# Read the first file, and ask for next sunrise time.
d = FPI.ReadIMG(lasers_all[0])
dt0 = local.localize(d.info['LocalTime'])
refdt = dt0 - datetime.timedelta(hours=12)
obs.date = refdt
sunset = obs.next_setting(sun).datetime().replace(tzinfo = pytz.utc)
obs.date = dt0
sunrise = obs.next_rising(sun).datetime().replace(tzinfo = pytz.utc)
lasers = []
for fn in lasers_all:
d = FPI.ReadIMG(fn)
dt = local.localize(d.info['LocalTime'])
if dt > sunset and dt < sunrise:
lasers.append(fn)
else:
#logfile.write(datetime.datetime.now().strftime('%m/%d/%Y %H:%M:%S %p: ') + 'Sun up during %s. Ignoring this image...' % fn )
print 'Sun up during %s. Ignoring this image...' % fn
# Initialize data holder
laser_times_center = []
laser_intT = []
laser_temperature = []
dt_laser = []
center = None
fnames_to_remove = []
for fname in lasers:
# Read in the laser image and estimate the center location
d = ReadIMG(fname)
img = np.asarray(d)
(cx,cy) = FindCenter(img)
appx_I = matplotlib.mlab.prctile(img,95) - matplotlib.mlab.prctile(img,5)
if appx_I < LAS_I_THRESH:
print '%s laser fringes not bright enough to be trustworthy. Ignoring this image in the whole analysis...' % (fname)
fnames_to_remove.append(fname)
# Make sure valid data returned
elif cx is not None and np.isfinite(cx):
if center is None:
center = [cx, cy]
else:
center = np.vstack((center, [cx, cy]))
#logfile.write(datetime.datetime.now().strftime('%m/%d/%Y %H:%M:%S %p: ') + '%s center: %.2f, %.2f\n' % (fname, cx, cy))
# Append the time of the laser image
laser_times_center.append(local.localize(d.info['LocalTime']))
laser_intT.append(d.info['ExposureTime'])
laser_temperature.append(d.info['CCDTemperature'])
dt_laser.append((laser_times_center[-1]-laser_times_center[0]).seconds)
print fname # TODO: DELETE ME
else:
#logfile.write(datetime.datetime.now().strftime('%m/%d/%Y %H:%M:%S %p: ') + \
# '%s center is None or nan. Ignoring this in poly fit...\n' % (fname))
print '%s center is None or nan. Ignoring this image in the whole analysis...' % (fname)
fnames_to_remove.append(fname)
for fname in fnames_to_remove:
lasers.remove(fname)
centersave = center.copy()
dt_laser = np.array(dt_laser)
# Try to do some initial quality control by judging the centerfinding
lt0 = local.localize(datetime.datetime(1989,4,18)) # placeholder, in case we use time-independent fit
# If there are no usable center locations, use the midpoint and write to the log.
if center is None or len(center)==0:
mid = np.ceil(np.shape(d)[0]/2.0)
cx = lambda t: mid
cy = lambda t: mid
print 'WARNING: Centerfinding failed for all laser images. Using midpoint of CCD, ' + \
'but suggested solution is to use Zenith reference.\n'
notify_the_humans = True
# If there is only one usable point, throw a warning but use this point.
elif np.shape(center)==(2,):
cx = lambda t: center[0]
cy = lambda t: center[1]
print 'WARNING: Centerfinding failed for all but 1 laser image. Using it, but' + \
' suggested solution is to use Zenith reference.\n'
notify_the_humans = True
else: # There are > 1 center points. Use them.
# If we have too much variation in the center locations, remove outliers
if (np.std(center,0) > center_variation_max).any(): # if variation is more than a few pixels, there's probably a bad point.
cenx = center[:,0]
ceny = center[:,1]
good = (abs(cenx - np.mean(cenx)) < np.std(cenx)) & (abs(ceny - np.mean(ceny)) < np.std(ceny))
#print ind
center = center[good,:]
dt_laser = dt_laser[good]
# Should we ignore the laser images in the subsequent analysis? For now,
# ignore them since these are probably invalid data, and will cause problems
# with laser parameter interpolation.
lasers = [fn for (fn, valid) in zip(lasers, good) if valid]
laser_times_center = [t for (t, valid) in zip(laser_times_center, good) if valid]
nignored = len(good) - sum(good)
if nignored > 0:
print '%03i laser images ignored because the center location is an outlier.\n' % nignored
# If there are enough points after this, fit a poly to the center positions
if len(dt_laser) > 0:
lt0 = laser_times_center[0]
npoly = np.ceil(len(dt_laser)/10) # use an adaptive degree polynomial to fit
# Limit the maximum degree, because of numerical sensitivity for large orders.
if npoly > 10:
npoly = 10
pf_cx = np.polyfit(dt_laser,center[:,0],npoly)
cx = np.poly1d(pf_cx)
pf_cy = np.polyfit(dt_laser,center[:,1],npoly)
cy = np.poly1d(pf_cy)
if len(dt_laser) < 6:
print 'WARNING: Very few (%i) laser images for center trending. Consider using Zenith reference.\n' % len(dt_laser)
notify_the_humans = True
else: # I don't think we can ever get to this point, but just in case:
mid = np.ceil(np.shape(d)[0]/2.0)
cx = lambda t: mid
cy = lambda t: mid
print 'WARNING: No usable center points for polyfit. Using midpoint of CCD, ' + \
'but suggested solution is to use Zenith reference.\n'
notify_the_humans = True
(Output: the laser image paths used for centerfinding, UAO_L_20130108_002453_003.img through UAO_L_20130108_123412_148.img, all under /rdata/airglow/fpi/minime05/uao/2013/20130107/.)
# Initialize values
output = []
last_chi = -999
laser_times = []
laser_redchi = []
last_t = instrument['nominal_t']
lam_laser = instrument['lam_laser']
# Loop through all of the lasers
for fname in lasers:
# Read in the laser image and estimate the center location
d = ReadIMG(fname)
img = np.asarray(d)
# Calculate the annuli to use for this time
dt = (local.localize(d.info['LocalTime'])-lt0).seconds
annuli = FindEqualAreas(img,cx(dt),cy(dt),N)
# Perform annular summation
laser_spectra, laser_sigma = AnnularSum(img,annuli,0)
if not np.isnan(laser_spectra).any(): # Do the inversion. Otherwise, ignore it.
####### Find good initial guesses for the parameters ######
# Magnification parameter by using geometry (assuming square binning)
alpha = instrument['pix_size']/instrument['focal_length'] * d.info['XBinning']
# Intensity and background by looking at fringes
I = laser_spectra.max() - laser_spectra.min()
B = laser_spectra.min()
laser_params = Parameters()
laser_params.add('n', value = 1.0, vary = False)
laser_params.add('t', value = None, vary = False)
laser_params.add('lam', value = lam_laser, vary = False)
laser_params.add('R', value = 0.5, vary = False)
laser_params.add('alpha', value = alpha, vary = False)
laser_params.add('I', value = I, vary = False)
laser_params.add('B', value = B, vary = False)
laser_params.add('a1', value = -0.1, min=-5.0, max=5.0, vary = False)
laser_params.add('a2', value = 0.0, min=-5.0, max=5.0, vary = False)
laser_params.add('b0', value = 0.5, min=-10.0, max=10.0, vary = False)
laser_params.add('b1', value = 0.0, min=-10.0, max=10.0, vary = False)
laser_params.add('b2', value = 0.0, min=-10.0, max=10.0, vary = False)
# To find a good initial guess for "t", we'll need to do a grid search. Search
# over 1 FSR around the last solved-for value (or the nominal value, if this is first trial).
# TODO: make this grid search a separate general function.
def goodness(t):
# return the correlation between the fringe with this t and the data
laser_params['t'].value = t
fringe = Laser_FringeModel(laser_params, annuli['r'])
return np.dot(fringe, laser_spectra)/(norm(fringe)*norm(laser_spectra))
t_candidates = linspace(last_t - lam_laser/4, last_t + lam_laser/4, 50)
corrvec = np.array([goodness(t) for t in t_candidates])
best_t = t_candidates[corrvec.argmax()]
laser_params['t'].value = best_t
####### Inversion of laser image ##########
# Now do least-squares fit, but in stages, varying only certain parameters at a time, according to:
order = [
['alpha'],\
['t','alpha'],\
['B','I'],\
['R'],\
['t','alpha','B','I','R','a1','a2'], \
]
if ESTIMATE_BLUR:
order.append(['t','alpha','B','I','R','a1','a2','b0','b1','b2'])
for group in order: # set all the params in this group to "vary=True", and then run inversion
for param in laser_params.keys():
if param in group:
laser_params[param].vary = True
else:
laser_params[param].vary = False
# Set falloff terms to false, if this instrument only has a couple fringes
if not instrument['many_fringes']:
for param in ['a2', 'b1', 'b2']:
laser_params[param].vary=False
laser_params[param].value=0.0
laser_fit = Minimizer(Laser_Residual,laser_params, \
fcn_args=(annuli['r'][N0:N1],), fcn_kws={'data': laser_spectra[N0:N1], 'sigma': laser_sigma[N0:N1]}, \
scale_covar = True)
laser_fit.leastsq()
#if (laser_fit.redchi/laser_fit.params['I'].value < 1.): # TODO: try to find a better way to do this
if True: # TODO: just keep this?
# This is a valid image, store time
laser_times.append(local.localize(d.info['LocalTime']))
# Save the results
output.append(laser_fit)
last_chi = laser_fit.redchi
last_t = best_t
laser_redchi.append(laser_fit.redchi)
#logfile.write(datetime.datetime.now().strftime('%m/%d/%Y %H:%M:%S %p: ') + \
# '%s reduced chisqr: %.4f\n' % (fname, laser_fit.redchi))
print fname # TODO: DELETE ME
print laser_fit.message
# Check if any laser images were actually analyzed.
if len(output)==0:
raise Exception('%s: No usable laser images found.' % data_dir)
(Output: each laser image path, followed by the fit message "Tolerance seems to be too small.")
######### Spline fit to the parameters ###########
# Grab the seconds from the first laser observation (required for spline interpolation)
dt = []
for x in laser_times:
diff = (x - laser_times[0])
dt.append(diff.seconds+diff.days*86400.)
dt = array(dt)
# Spline fit to the parameters.
# (This will work for both the fitted and constant params)
for param in laser_fit.params:
# Grab the estimates and the associated (normalized) uncertanty
p = []
ep = []
for o in output:
p.append(o.params[param].value)
ep.append(o.params[param].stderr)
p = np.array(p)
ep = np.array(ep)
m = len(ep)
s = m # Expected value of chi^2
if param == 't': # force 't' to fit more smoothly
# i.e., we don't trust those small error bars, so make them bigger
ep = ep * 3 # TODO: optimal value? 1 seems too small, 10 seems too big
# set weights
if laser_fit.params[param].vary:
# use the error bars from the inversion, but
# replace the values that are infinite,
# (i.e., where the errorbars couldn't be calculated)
# with very large numbers.
w = [1/stderr if stderr > 0 else 1/(1e6*paramval) for paramval,stderr in zip(p,ep) ]
else:
# use arbitrary error bars (doesn't matter - it will fit exactly)
w = np.ones(shape(ep))
# Try spline, then linear, then zeroth-order interpolation
try: # spline interpolation
#sfit = interpolate.UnivariateSpline(np.array(dt),p, w=w, s=s)
sfit = interpolate.interp1d(np.array(dt), p)
except: # Try linear.
notify_the_humans = True
try: # if there's only one laser image, then even linear won't work.
print 'Spline interpolation failed for laser param "%s". Defaulting to linear interpolation.\n' % (param)
sfit = interpolate.interp1d(np.array(dt), p)
except: # all else failed. There's probably only one laser image.
print 'Spline and linear interpolation both failed for laser param ' + \
'"%s". Defaulting to zeroth-order interpolation.\n' % (param)
sfit = lambda t: p[0] # whatever the input, just return the parameter.
laser_fit.params[param].spfit = sfit
This is an example of a night with large etalon gap fluctuations. Shown here is the estimated etalon gap, obtained by analyzing the laser images. This is more or less equivalent to the "laser wind" that was used in the fringe-by-fringe Fourier analysis.
dt = array([(laser_time - laser_times[0]).total_seconds() for laser_time in laser_times])
# Show the spline fit for a certain parameter
param = 't'
dt_vec = linspace(0, max(dt), 500)
p_vec = laser_fit.params[param].spfit(dt_vec)
p = [o.params[param].value for o in output]
pe = [o.params[param].stderr for o in output]
datet_vec = [laser_times[0] + datetime.timedelta(seconds=ts) for ts in dt_vec]
fig = figure(figsize=(4,2))
h = fig.add_subplot(111)
h.plot(datet_vec,p_vec,'k')
h.errorbar(laser_times,p,yerr=pe,fmt='k.')
h.set_xlim([laser_times[0] - datetime.timedelta(hours=0.5), laser_times[-1] + datetime.timedelta(hours=0.5)])
h.xaxis.set_major_formatter(dates.DateFormatter('%H'))
h.set_ylabel('Estimated Etalon Gap [m]')
h.set_xlabel('Universal Time')
ti = h.set_title(site['Abbreviation'] + ':' + laser_times[0].strftime(' %d %b, %Y %H:%M LT') +\
' - ' + laser_times[-1].strftime('%H:%M LT') )
ti.set_y(1.09)
Clearly, there are fluctuations larger than the error bars. There are two reasons for this. First, the etalon heater produced impulsive heating events that caused rapid variations in the effective etalon gap. Second, our laser cadence was too slow to capture these variations.
Because our calibration did not capture the true variation of the etalon gap, we get artifacts when we estimate the wind from the sky images. These artifacts can be large, and they are not currently characterized by our error bars.
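As a rough illustration, here is a toy sketch: the gap, fluctuation amplitude and period, and laser cadence below are all made-up numbers, not values from this night. A gap that fluctuates faster than the laser cadence is undersampled, and whatever the interpolation between laser samples misses maps directly into a spurious wind via Δv ≈ (c/t)·Δt.
# Toy sketch (hypothetical numbers): a rapidly fluctuating etalon gap,
# sampled at a slow laser cadence, produces a spurious wind when the
# calibration interpolates between laser samples.
c_light = 3e8 # m/s
t_nom = 1.5e-2 # m, hypothetical nominal etalon gap
amp = 5e-9 # m, hypothetical gap fluctuation amplitude
period = 600. # s, hypothetical fluctuation period
cadence = 1800. # s, hypothetical laser cadence
tsec = np.arange(0, 4*3600, 10.) # a few hours on a 10-s grid
gap_true = t_nom + amp*np.sin(2*np.pi*tsec/period) # "true" gap
tlaser = np.arange(0, tsec[-1], cadence) # laser sample times
gap_laser = t_nom + amp*np.sin(2*np.pi*tlaser/period) # gap at the laser samples
gap_interp = np.interp(tsec, tlaser, gap_laser) # what the calibration "sees"
wind_artifact = c_light/t_nom * (gap_true - gap_interp) # spurious wind [m/s]
print('Max spurious wind in this toy case: %.0f m/s' % abs(wind_artifact).max())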
######################################################################################
instr_name = 'minime05'
year = 2013
doy = 7
######################################################################################
import FPIprocess
from datetime import timedelta
r = FPIprocess.load_level0(instr_name, year, doy)
fpir = r['FPI_Results']
site = r['site']
direction = fpir['direction']
LOSwind = fpir['LOSwind']
sigma_LOSwind = fpir['sigma_LOSwind']
T = fpir['T']
sigma_T = fpir['sigma_T']
I = fpir['skyI']/fpir['sky_intT']
sky_times = fpir['sky_times']
skyI = fpir['skyI']
skyB = fpir['skyB']
sigma_skyI = fpir['sigma_skyI']
intT = fpir['sky_intT']
cloud = None
if fpir['Clouds']:
cloud = fpir['Clouds']['mean']
from matplotlib import dates
fig = figure(figsize=(4,4))
ax = subplot(212)
dref, drefe = FPI.DopplerReference(fpir, reference='laser')
LOSwind_ref = LOSwind - dref
for direc in ['Zenith']:
I = np.array([si for (si,d) in zip(LOSwind_ref, direction) if d == direc])
Ie = np.array([si for (si,d) in zip(sigma_LOSwind, direction) if d == direc])
t = np.array([si for (si,d) in zip(sky_times, direction) if d == direc])
ax.errorbar(t, I, yerr=Ie, fmt='.-', label=direc)
ax.set_xlim([sky_times[0] - timedelta(hours=0.5), sky_times[-1] + timedelta(hours=0.5)])
ax.set_ylim((prctile(LOSwind,5)-100,prctile(LOSwind,95)+100))
ax.xaxis.set_major_formatter(dates.DateFormatter('%H'))
ax.set_ylabel('Vertical Wind, [m/s]')
ax.set_xlabel('Universal Time')
ax.legend(loc='best', prop={'size':8}, numpoints=1, ncol=5)
ax.grid(True)
ax.set_ylim((-100,100))
from matplotlib import dates
param = 't'
ax = fig.add_subplot(211)
ax.errorbar(fpir['laser_times'], fpir['laser_value'][param], yerr=fpir['laser_stderr'][param], fmt='k')
ax.set_xlim([sky_times[0] - timedelta(hours=0.5), sky_times[-1] + timedelta(hours=0.5)])
ax.xaxis.set_major_formatter(dates.DateFormatter('%H'))
ax.set_ylabel('Etalon Gap [m]')
#ax.set_xlabel('Universal Time')
ax.grid(True)
The question is: can we characterize these fluctuations in a way that lets us attach appropriate error bars to our wind measurements? The conversion factor follows from first principles: at fixed fringe order the fringe-center wavelength scales with the etalon gap, so a gap error Δt produces an apparent line-of-sight wind error Δv ≈ (c/t)·Δt.
Shown below are the extra error bars for this case, which can be as large as ~100 m/s. They are calculated from the difference between consecutive etalon gap samples.
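For a sense of scale (hypothetical round numbers): with a nominal gap of about 1.5 cm, a gap error of 5 nm gives Δv ≈ (3×10⁸ m/s / 0.015 m) × 5×10⁻⁹ m ≈ 100 m/s, comparable to the largest calibration errors shown below.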
def calc_wind_calib_error(my_dt):
'''
Calculate the wind error induced by imperfect knowledge of the
etalon gap at the indicated time.
INPUTS:
my_dt: number of seconds since first laser exposure.
OUTPUTS:
wind_cal_error: m/s. This is nan if it's not straddled by laser
exposures.
HISTORY:
2015 Feb 02: Written by Brian J. Harding
'''
# Find previous and next laser images:
dt = array([(laser_time - laser_times[0]).total_seconds() for laser_time in laser_times])
if my_dt < dt[0] or my_dt > dt[-1]: # not bounded by laser images
wind_cal_error = nan
else:
# Determine the index of the last laser image
j = sum(dt <= my_dt)-1
# Load the estimated etalon gap from the "before" and "after" laser exposures
t0 = output[j].params['t'].value
t1 = output[j+1].params['t'].value
# Translate the difference to a calibration wind error
wind_cal_error = 3e8/t0 * abs(t1-t0)
#print '%s %s %s [%s, %s]\n' % (my_dt, wind_cal_error, j, dt[0], dt[-1])
return wind_cal_error
my_dt_vec = linspace(0,3600*12,1000) # number of seconds since first laser image
wind_calib_error = zeros(len(my_dt_vec))
for i in range(len(my_dt_vec)):
wind_calib_error[i] = calc_wind_calib_error(my_dt_vec[i])
t = [laser_times[0] + datetime.timedelta(seconds=my_dt) for my_dt in my_dt_vec]
figure(figsize=(4,4))
subplot(2,1,1)
param = 't'
dt_vec = linspace(0, max(my_dt_vec), 500)
p_vec = laser_fit.params[param].spfit(dt_vec)
p = [o.params[param].value for o in output]
pe = [o.params[param].stderr for o in output]
datet_vec = [laser_times[0] + datetime.timedelta(seconds=ts) for ts in dt_vec]
plot(datet_vec,p_vec,'k')
errorbar(laser_times,p,yerr=pe,fmt='k.')
xlim([laser_times[0] - datetime.timedelta(hours=0.5), laser_times[-1] + datetime.timedelta(hours=0.5)])
gca().xaxis.set_major_formatter(dates.DateFormatter('%H'))
ylabel('Etalon Gap [m]')
ti = h.set_title(site['Abbreviation'] + ':' + laser_times[0].strftime(' %d %b, %Y %H:%M LT') + ' - ' + laser_times[-1].strftime('%H:%M LT') )
ti.set_y(1.09)
subplot(2,1,2)
plot(t,wind_calib_error,'k')
gca().xaxis.set_major_formatter(dates.DateFormatter('%H'))
xlabel('UT')
ylabel('Wind Calibration\nError [m/s]')
xlim([laser_times[0] - datetime.timedelta(hours=0.5), laser_times[-1] + datetime.timedelta(hours=0.5)])
tight_layout()
def get_moon_angle(t, lat, lon, az, ze):
'''
Calculate the great-circle angle between the direction of the moon
and the direction given by (az,ze), for the datetime t and location (lat,lon).
'''
obs = ephem.Observer()
obs.lat = str(lat)
obs.lon = str(lon)
obs.date = t.astimezone(pytz.utc)
moon = ephem.Moon(obs)
moonAz = moon.az.real
moonZe = math.pi/2 - moon.alt.real
# Code copied from original Master scripts for FPI observations
a = math.cos(az*math.pi/180)*math.sin(ze*math.pi/180)
b = math.sin(az*math.pi/180)*math.sin(ze*math.pi/180)
aMoon = math.cos(moonAz)*math.sin(moonZe)
bMoon = math.sin(moonAz)*math.sin(moonZe)
moonAngle = math.acos(a*aMoon + b*bMoon + math.cos(ze*math.pi/180) * math.cos(moonZe))
return moonAngle*180./pi
############# Invert the sky images ###############
# Grab all sky images. Don't use sky images when sun is up,
# or before (after) the first (last) laser image, or if
# the moon is too close to the line of sight.
sky = []
for fn in sky_all[:]:
d = ReadIMG(fn)
dt = local.localize(d.info['LocalTime'])
sundown = dt > sunset and dt < sunrise
laserstraddle = dt > laser_times[0] and dt < laser_times[-1]
# calculate moon angle
moonangle = get_moon_angle(dt, site['Location'][0], site['Location'][1], d.info['azAngle'], d.info['zeAngle'])
if not sundown: # Write to log and don't use this image
#logfile.write(datetime.datetime.now().strftime('%m/%d/%Y %H:%M:%S %p: ') + \
#'Sun up during %s. Ignoring this image...\n' % fn )
print('Sun up during %s. Ignoring this image...\n' % fn )
elif not laserstraddle: # Write to log and don't use this image
#logfile.write(datetime.datetime.now().strftime('%m/%d/%Y %H:%M:%S %p: ') + \
#'%s not bounded by laser images. Ignoring this image...\n' % fn )
print('%s not bounded by laser images. Ignoring this image...\n' % fn )
elif moonangle < MOON_THRESH:
print('%s is looking too close to the moon (%.1f deg). Ignoring this image...\n' % (fn,moonangle))
######################## HACK TO ONLY ANALYZE ZENITH MEASUREMENTS ##############################
elif abs(d.info['zeAngle']) > 0.5:
pass
################################################################################################
else:
sky.append(fn)
if len(sky) == 0:
raise Exception('%s: No usable sky images found.' % data_dir)
# Constants
c = 299792458.       # speed of light [m/s]
k = 1.3806503e-23    # Boltzmann constant [J/K]
m = 16/6.0221367e26  # mass of atomic oxygen [kg] (16 amu / Avogadro's number)
# Initialize data holder
sky_times = []
az = []
ze = []
LOSwind = []
sigma_LOSwind = []
calerror_LOSwind = []
T = []
sigma_T = []
direction = []
sky_redchi = []
skyI = []
skyB = []
ccdB = []
sigma_skyI = []
sigma_skyB = []
sigma_ccdB = []
sky_out = []
sky_intT = []
sky_temperature = []
# Categorize the look directions in the directory
valid_az = array([site['Directions'][direc]['az'] for direc in site['Directions']])
valid_ze = array([site['Directions'][direc]['ze'] for direc in site['Directions']])
direc_names = site['Directions'].keys()
all_az = []
all_ze = []
for fname in sky:
d = ReadIMG(fname)
all_az.append(d.info['azAngle'])
all_ze.append(d.info['zeAngle'])
#print('%s: %.1f, %.1f' % (fname, d.info['azAngle'], d.info['zeAngle']))
direc_idx, dists = sort_look_directions(valid_az, valid_ze, all_az, all_ze, direc_tol)
lam0 = instrument['lam0']
last_lamc = lam0 # nominal value of line center
lamc_start_set = False # Turns true when a reasonable search interval center is found
my_params = dict()
for i, fname in enumerate(sky):
# Read in the sky image
d = ReadIMG(fname)
img = np.asarray(d)
# Calculate the annuli to use for this time
dt = (local.localize(d.info['LocalTime'])-lt0).seconds
annuli = FindEqualAreas(img,cx(dt),cy(dt),N)
# Perform the annular summation
sky_spectra, sky_sigma = AnnularSum(img,annuli,0)
# See if there are any NaNs in the spectra. If so, skip this one.
if np.isnan(sky_spectra).sum() == 0:
# Append the time of the sky image
sky_times.append(local.localize(d.info['LocalTime']))
sky_intT.append(d.info['ExposureTime'])
sky_temperature.append(d.info['CCDTemperature'])
# Record the look direction.
# For the record, use "human-friendly" azimuth and zenith:
temp_az = d.info['azAngle']
temp_ze = d.info['zeAngle']
if temp_ze < 0.0:
temp_ze = abs(temp_ze)
temp_az = np.mod(temp_az+180.0,360)
az.append(temp_az)
ze.append(temp_ze)
# Change direction index into string
idx_match = direc_idx[i] # This is a list like [1] or [1,3] or []
# How many matches were there? Do something different for each case.
direcstr = ''
if len(idx_match)==0: # No match was found
direcstr = 'Unknown'
#logfile.write(datetime.datetime.now().strftime('%m/%d/%Y %H:%M:%S %p: ') + \
#'%s has a look direction that is not recognized: Az=%.1f, Ze=%.1f.\n' % (fname, temp_az, temp_ze))
print('%s has a look direction that is not recognized: Az=%.1f, Ze=%.1f.\n' % (fname, temp_az, temp_ze))
elif len(idx_match)==1: # unique recognized look direction
direcstr = direc_names[idx_match[0]]
else: # more than one match was found
# If "Zenith" is one of them, use it, otherwise, use any of them.
# Write a warning to the log.
direcstr_list = [direc_names[j] for j in idx_match]
if 'Zenith' in direcstr_list:
direcstr = 'Zenith'
else:
direcstr = direcstr_list[0]
#logfile.write(datetime.datetime.now().strftime('%m/%d/%Y %H:%M:%S %p: ') + \
# '%s has a look direction with > 1 match: Az=%.1f, Ze=%.1f. ' + \
# 'Matches: %s. Choosing: %s\n' % \
# (fname, temp_az, temp_ze, str(direcstr_list), direcstr))
print('%s has a look direction with > 1 match: Az=%.1f, Ze=%.1f. Matches: %s. Choosing: "%s"\n' %\
(fname, temp_az, temp_ze, str(direcstr_list), direcstr) )
direction.append(direcstr)
# Grab the sky times
diff = sky_times[-1] - laser_times[0]
my_dt = diff.seconds+diff.days*86400.
# Set the fitting parameter guesses
sky_params = Parameters()
# Use the spline-interpolated values for the instrument function at this time
for param in laser_fit.params:
sky_params.add(param, value = laser_fit.params[param].spfit(my_dt), vary = False)
# But change the sky wavelength to lam0
sky_params['lam'].value = lam0
# Set up the forward model
L = 301
A_1D, lamvec = get_conv_matrix_1D(sky_params, annuli['r'][N0:N1], L, lam0)
# Come up with good initial guesses for the sky parameters
skyI_guess = sky_spectra[N0:N1].max() - sky_spectra[N0:N1].min()
skyB_guess = 0.0
ccdB_guess = sky_spectra[N0:N1].min()
sky_params.add('lamc', value = lam0, vary = True ) # We'll do a grid search to find a good value for lamc
sky_params.add('T', value = 1000, vary = True , min = 0.1, max = 5000.0)
sky_params.add('skyI', value = skyI_guess, vary = True , min = 0.0)
sky_params.add('skyB', value = skyB_guess, vary = True )
sky_params.add('skym', value = 0.0, vary = False) # Don't try to estimate skym (or should we?)
sky_params.add('ccdB', value = ccdB_guess, vary = True )
sky_params.add('lam0', value = lam0, vary = False) # This is determined by chemistry so it can't change.
# Do a grid search to find a good starting value for lamc.
# The instrument bias might mean that lam0 is a really bad value to start with.
# Search over one FSR around the last solved-for value (or the nominal value, if it's the first run).
# It's not really important to sort by direction, since the instrument bias dominates.
def goodness(lamc):
# return the correlation between the fringe with this lamc and the data
sky_params['lamc'].value = lamc
fringe = Sky_FringeModel(sky_params, annuli['r'][N0:N1], lamvec, A_1D)
return np.dot(fringe, sky_spectra[N0:N1])/(norm(fringe)*norm(sky_spectra[N0:N1]))
FSR = lam0**2/(2*sky_params['t'].value)
lamc_candidates = linspace(last_lamc - FSR/2, last_lamc + FSR/2, 50)
corrvec = np.array([goodness(lamc) for lamc in lamc_candidates])
best_lamc = lamc_candidates[corrvec.argmax()]
sky_params['lamc'].value = best_lamc
# Make a copy for future debugging
sky_params_init = Parameters()
for p in sky_params:
sky_params_init.add(p, value = sky_params[p].value)
# Take the inversion in steps
order = [
['skyI'],\
['skyI','ccdB'],\
['skyI', 'ccdB', 'skyB'], \
['skyI', 'ccdB', 'skyB', 'lamc', 'T'], \
]
for group in order: # set all the params in this group to "vary=True", and then run inversion
for param in sky_params.keys():
if param in group:
sky_params[param].vary = True
else:
sky_params[param].vary = False
sky_fit = Minimizer(Sky_Residual,sky_params,fcn_args=(annuli['r'][N0:N1],lamvec,A_1D), \
fcn_kws={'data': sky_spectra[N0:N1], 'sigma': sky_sigma[N0:N1]}, scale_covar=True)
sky_fit.prepare_fit()
sky_fit.leastsq()
# if the inversion failed, replace values with nans and set error bars to inf
if not sky_fit.success or not sky_fit.errorbars:
for p in sky_fit.params:
sky_fit.params[p].value = np.nan
sky_fit.params[p].stderr = np.inf
sky_redchi.append(sky_fit.redchi)
sky_out.append(sky_fit)
# Calculate wind calibration error
calerror_LOSwind.append(calc_wind_calib_error(my_dt))
# Transform from lamc to v and collect all parameters
lamc = sky_params['lamc'].value
sigma_lamc = sky_params['lamc'].stderr
v = c * (lamc/lam0 - 1)
sigma_v = c * sigma_lamc / lam0
LOSwind.append(v) # negative sign
sigma_LOSwind.append(sigma_v)
T.append(sky_fit.params['T'].value)
sigma_T.append(sky_fit.params['T'].stderr)
skyI.append(sky_params['skyI'].value)
sigma_skyI.append(sky_params['skyI'].stderr)
skyB.append(sky_params['skyB'].value)
sigma_skyB.append(sky_params['skyB'].stderr)
#logfile.write(datetime.datetime.now().strftime('%m/%d/%Y %H:%M:%S %p: ') + '%s reduced chisqr: %.4f\n' % (fname, sky_fit.redchi))
print('%s reduced chisqr: %.4f. Message:"%s"' % (fname, sky_fit.redchi, sky_fit.message))
# use the lamc value to center the next grid search, if the error bars are small enough
if sigma_v < SIGMA_V_THRESH and sigma_v > 0 and not lamc_start_set: # sigma_v==0 means it didn't converge
last_lamc = lamc # save for next time
lamc_start_set = True
#print 'v=%0.1f\tsigma_v=%.1f\tnext_init=%.1f' % (v, sigma_v, c*(last_lamc/lam0 - 1) )
else:
#logfile.write(datetime.datetime.now().strftime('%m/%d/%Y %H:%M:%S %p: ') + '%s is invalid and not analyzed\n' % fname)
print '%s is invalid and not analyzed' % fname
if len(sky) == 0:
print '%s: No usable sky images found.' % data_dir
raise Exception('%s: No usable sky images found.' % data_dir)
# Convert sky_params to array
n = len(sky_out)
sky_value = {}
sky_stderr = {}
for p in sky_out[-1].params:
sky_value[p] = np.zeros(n)
sky_stderr[p] = np.zeros(n)
for i in range(0,n):
sky_value[p][i] = sky_out[i].params[p].value
sky_stderr[p][i] = sky_out[i].params[p].stderr
(Output: the first sky image is skipped because the sun is up; the next several UAO_X images are skipped because they are not bounded by laser images; the remaining sky images report reduced chisqr values between roughly 2.7 and 18.5, each with the message "Tolerance seems to be too small.")
This shows the estimated vertical wind with the extra "calibration uncertainty" included. With it, the error bars better represent our true uncertainty in the vertical wind.
fig = plt.figure(figsize=(5,3.5))
subplot(211)
#ax.errorbar(sky_times, LOSwind, yerr=sigma_LOSwind ,fmt='k.-')
#for direc in list(set(direction)):
for direc in ['Zenith']:
v = np.array([si for (si,d) in zip(LOSwind, direction) if d == direc])
vref = mean(v)
ve = np.array([si for (si,d) in zip(sigma_LOSwind, direction) if d == direc])
t = np.array([si for (si,d) in zip(sky_times, direction) if d == direc])
errorbar(t, v-vref, yerr=ve, fmt='k.-', label=direc)
xlim([sky_times[0] - datetime.timedelta(hours=0.5), sky_times[-1] + datetime.timedelta(hours=0.5)])
gca().xaxis.set_major_formatter(dates.DateFormatter('%H'))
legend(loc='best', prop={'size':6}, numpoints=1, ncol=3)
ylabel('Vertical wind\n(fit error)[m/s]')
#xlabel('Universal Time')
ylim((-150,150))
grid()
subplot(2,1,2)
for direc in ['Zenith']:
v = np.array([si for (si,d) in zip(LOSwind, direction) if d == direc])
vref = mean(v)
ve_fit = np.array([si for (si,d) in zip(sigma_LOSwind, direction) if d == direc])
ve_cal = np.array([si for (si,d) in zip(calerror_LOSwind, direction) if d == direc])
ve_tot = sqrt(ve_fit**2 + ve_cal**2)
t = np.array([si for (si,d) in zip(sky_times, direction) if d == direc])
errorbar(t, v-vref, yerr=ve_tot, fmt='k.-', label=direc)
ylim((-150,150))
xlim([sky_times[0] - datetime.timedelta(hours=0.5), sky_times[-1] + datetime.timedelta(hours=0.5)])
gca().xaxis.set_major_formatter(dates.DateFormatter('%H'))
ylabel('Vertical wind\n(total error) [m/s]')
grid()
tight_layout()
legend(loc='best', prop={'size':6}, numpoints=1, ncol=3)
Next we show the same plots for a more recent night, after we turned off the etalon heater and increased the laser cadence.
# This is a copy of part of FPIprocess.py
#
# TODO: fold in the "Zenith reference" changes to this notebook.
# Right now this code only reflects "Laser reference"
######### Fill this in #############
instr_name = 'minime05'
year = 2014
doy = 333
####################################
import re
import FPI
import glob
from lmfit import Parameters
import logging
from optparse import OptionParser
import MySQLdb as mdb
import datetime
import numpy as np
import os
import BoltwoodSensor
import X300Sensor
import FPIDisplay
import pytz
import datetime
import fpiinfo
reload(fpiinfo)
reload(FPIDisplay)
reload(FPI)
direc_tol = 3.0
fpi_dir = '/rdata/airglow/fpi/'
nominal_dt = datetime.datetime(year,1,1) + datetime.timedelta(days = doy-1)
site_name = fpiinfo.get_site_of(instr_name, nominal_dt)
if site_name is None:
raise Exception('Instrument %s not registered on this date' % instr_name)
site = fpiinfo.get_site_info(site_name)
# Import the instrument information
instrument = fpiinfo.get_instr_info(instr_name, nominal_dt)
data_stub = fpi_dir + instr_name + '/' + site_name + '/'
# Create "minime05_uao_20130729" string
datestr = nominal_dt.strftime('%Y%m%d')
instrsitedate = instr_name + '_' + site_name + '_' + datestr
# Determine the times between which files will be accepted.
# Use local-time noon on doy to local-time noon on doy+1
local = pytz.timezone(site['Timezone'])
start_dt = datetime.datetime(year,1,1) + datetime.timedelta(days = doy-1, hours = 12)
stop_dt = datetime.datetime(year,1,1) + datetime.timedelta(days = doy-1, hours = 36)
start_dt = local.localize(start_dt)
stop_dt = local.localize(stop_dt)
# Create a list of relevant filenames by searching in adjacent
# days' directories also, since we might have problems with UT/LT,
# time zones, etc.
laser_fns = []
sky_fns = []
for day_offset in range(-2,3):
dir_dt = nominal_dt + datetime.timedelta(days = day_offset)
# Create the YYYYMMDD date format
yearstr = dir_dt.strftime('%Y')
datestr = dir_dt.strftime('%Y%m%d')
# Create the directory name for the data to be processed
data_dir = data_stub + yearstr + '/' + datestr + '/'
# Look in the data directory, and grab the files if they were
# taken between the start and stop times.
# First, lasers:
fns_all = glob.glob(data_dir + '*L*.img')
for fn in fns_all:
d = FPI.ReadIMG(fn)
dtime = local.localize(d.info['LocalTime'])
if dtime > start_dt and dtime < stop_dt:
laser_fns.append(fn)
# Second, sky:
fns_all = glob.glob(data_dir + '*X*.img')
for fn in fns_all:
d = FPI.ReadIMG(fn)
dtime = local.localize(d.info['LocalTime'])
if dtime > start_dt and dtime < stop_dt:
sky_fns.append(fn)
#print fn
laser_fns.sort()
sky_fns.sort()
if not laser_fns and not sky_fns:
raise Exception('No %s data found between %s and %s. \n' % (instr_name, str(start_dt), str(stop_dt)))
if not laser_fns and sky_fns:
raise Exception('No %s laser data found between %s and %s. Sky data found.\n' % \
(instr_name, str(start_dt), str(stop_dt)))
# Create FPI.ParameterFit arguments
N = instrument['N']
N0 = instrument['N0']
N1 = instrument['N1']
logfile = None
horizon_cutoff = '-6:0'
datestr = nominal_dt.strftime('%Y%m%d')
print instrsitedate
# This is a copy of the beginning of FPI.ParameterFit()
import subprocess
import re
import Image
from math import pi, floor, sqrt
import numpy as np
import peakdetect as pd
from lmfit import Minimizer, Parameters, report_errors, minimize
import scipy
from scipy import interpolate
import glob as glob
import time
import matplotlib.pyplot as pyplot
from matplotlib.mlab import prctile
from scipy import ndimage
import mahotas
import pytz
from pytz import timezone
import datetime
from scipy import ndimage
import mahotas
import matplotlib.dates as dates
import ephem
# For compatibility with running outside FPI.py
from FPI import *
center_variation_max = 3. # pixels, variation throughout night. Above this, outliers > 1 stdev will be ignored.
# Set the threshold below which the solved-for lamc
# will be used to center the grid search for the next iteration
SIGMA_V_THRESH = 30.
MOON_THRESH = 37. # deg. Don't use samples that are closer than this angle to the moon.
# Whether to estimate blur parameters. This was found necessary
# because the Morocco FPI appears to have something wrong with it, and
# there are fewer artifacts in the estimated temperature if this is set
# to False.
ESTIMATE_BLUR = True
if instrument['Abbreviation'] == 'minime03':
ESTIMATE_BLUR = False
LAS_I_THRESH = 25. # counts. If the laser intensity is less than this, ignore it.
print len(sky_fns)
minime05_uao_20141129
354
center_variation_max = 10.0
lasers_all = laser_fns[2:]
sky_all = sky_fns[:]
lasers_all.sort()
sky_all.sort()
if len(lasers_all)==0:
raise Exception('No Laser images found in %s' % data_dir)
# Only use images during sundown conditions.
local = pytz.timezone(site['Timezone']) # pytz timezone of the site
sun = ephem.Sun()
obs = ephem.Observer()
obs.lat = str(site['Location'][0])
obs.lon = str(site['Location'][1])
obs.elevation = site['Location'][2]
obs.pressure = 0.
obs.horizon = horizon_cutoff
# Read the first file, subtract 12 hours, then ask for next sunset time
# Read the first file, and ask for next sunrise time.
d = FPI.ReadIMG(lasers_all[0])
dt0 = local.localize(d.info['LocalTime'])
refdt = dt0 - datetime.timedelta(hours=12)
obs.date = refdt
sunset = obs.next_setting(sun).datetime().replace(tzinfo = pytz.utc)
obs.date = dt0
sunrise = obs.next_rising(sun).datetime().replace(tzinfo = pytz.utc)
lasers = []
for fn in lasers_all:
d = FPI.ReadIMG(fn)
dt = local.localize(d.info['LocalTime'])
if dt > sunset and dt < sunrise:
lasers.append(fn)
else:
#logfile.write(datetime.datetime.now().strftime('%m/%d/%Y %H:%M:%S %p: ') + 'Sun up during %s. Ignoring this image...' % fn )
print 'Sun up during %s. Ignoring this image...' % fn
# Initialize data holder
laser_times_center = []
laser_intT = []
laser_temperature = []
dt_laser = []
center = None
fnames_to_remove = []
for fname in lasers:
# Read in the laser image and estimate the center location
d = ReadIMG(fname)
img = np.asarray(d)
(cx,cy) = FindCenter(img)
appx_I = matplotlib.mlab.prctile(img,95) - matplotlib.mlab.prctile(img,5)
if appx_I < LAS_I_THRESH:
print '%s laser fringes not bright enough to be trustworthy. Ignoring this image in the whole analysis...' % (fname)
fnames_to_remove.append(fname)
# Make sure valid data returned
elif cx is not None and np.isfinite(cx):
if center is None:
center = [cx, cy]
else:
center = np.vstack((center, [cx, cy]))
#logfile.write(datetime.datetime.now().strftime('%m/%d/%Y %H:%M:%S %p: ') + '%s center: %.2f, %.2f\n' % (fname, cx, cy))
# Append the time of the laser image
laser_times_center.append(local.localize(d.info['LocalTime']))
laser_intT.append(d.info['ExposureTime'])
laser_temperature.append(d.info['CCDTemperature'])
dt_laser.append((laser_times_center[-1]-laser_times_center[0]).seconds)
print fname # TODO: DELETE ME
else:
#logfile.write(datetime.datetime.now().strftime('%m/%d/%Y %H:%M:%S %p: ') + \
# '%s center is None or nan. Ignoring this in poly fit...\n' % (fname))
print '%s center is None or nan. Ignoring this image in the whole analysis...' % (fname)
fnames_to_remove.append(fname)
for fname in fnames_to_remove:
lasers.remove(fname)
centersave = center.copy()
dt_laser = np.array(dt_laser)
# Try to do some initial quality control by judging the centerfinding
lt0 = local.localize(datetime.datetime(1989,4,18)) # placeholder, in case we use time-independent fit
# If there are no usable center locations, use the midpoint and write to the log.
if center is None or len(center)==0:
mid = np.ceil(np.shape(d)[0]/2.0)
cx = lambda t: mid
cy = lambda t: mid
print 'WARNING: Centerfinding failed for all laser images. Using midpoint of CCD, ' + \
'but suggested solution is to use Zenith reference.\n'
notify_the_humans = True
# If there is only one usable point, throw a warning but use this point.
elif np.shape(center)==(2,):
cx = lambda t: center[0]
cy = lambda t: center[1]
print 'WARNING: Centerfinding failed for all but 1 laser image. Using it, but' + \
' suggested solution is to use Zenith reference.\n'
notify_the_humans = True
else: # There are > 1 center points. Use them.
# If we have too much variation in the center locations, remove outliers
if (np.std(center,0) > center_variation_max).any(): # if variation is more than a few pixels, there's probably a bad point.
cenx = center[:,0]
ceny = center[:,1]
good = (abs(cenx - np.mean(cenx)) < np.std(cenx)) & (abs(ceny - np.mean(ceny)) < np.std(ceny))
#print ind
center = center[good,:]
dt_laser = dt_laser[good]
# Should we ignore the laser images in the subsequent analysis? For now,
# ignore them since these are probably invalid data, and will cause problems
# with laser parameter interpolation.
lasers = [fn for (fn, valid) in zip(lasers, good) if valid]
laser_times_center = [t for (t, valid) in zip(laser_times_center, good) if valid]
nignored = len(good) - sum(good)
if nignored > 0:
print '%03i laser images ignored because the center location is an outlier.\n' % nignored
# If there are enough points after this, fit a poly to the center positions
if len(dt_laser) > 0:
lt0 = laser_times_center[0]
npoly = np.ceil(len(dt_laser)/10) # use an adaptive degree polynomial to fit
# Limit the maximum degree, because of numerical sensitivity for large orders.
if npoly > 10:
npoly = 10
pf_cx = np.polyfit(dt_laser,center[:,0],npoly)
cx = np.poly1d(pf_cx)
pf_cy = np.polyfit(dt_laser,center[:,1],npoly)
cy = np.poly1d(pf_cy)
if len(dt_laser) < 6:
print 'WARNING: Very few (%i) laser images for center trending. Consider using Zenith reference.\n' % len(dt_laser)
notify_the_humans = True
else: # I don't think we can ever get to this point, but just in case:
mid = np.ceil(np.shape(d)[0]/2.0)
cx = lambda t: mid
cy = lambda t: mid
print 'WARNING: No usable center points for polyfit. Using midpoint of CCD, ' + \
'but suggested solution is to use Zenith reference.\n'
notify_the_humans = True
[Output truncated: list of laser image filenames found in /rdata/airglow/fpi/minime05/uao/2014/20141129/, from UAO_L_20141129_232125_003.img through UAO_L_20141130_121332_354.img]
# Initialize values
output = []
last_chi = -999
laser_times = []
laser_redchi = []
last_t = instrument['nominal_t']
lam_laser = instrument['lam_laser']
# Loop through all of the lasers
for fname in lasers:
# Read in the laser image and estimate the center location
d = ReadIMG(fname)
img = np.asarray(d)
# Calculate the annuli to use for this time
dt = (local.localize(d.info['LocalTime'])-lt0).seconds
annuli = FindEqualAreas(img,cx(dt),cy(dt),N)
# Perform annular summation
laser_spectra, laser_sigma = AnnularSum(img,annuli,0)
if not np.isnan(laser_spectra).any(): # Do the inversion. Otherwise, ignore it.
####### Find good initial guesses for the parameters ######
# Estimate the magnification parameter from geometry (assuming square binning)
alpha = instrument['pix_size']/instrument['focal_length'] * d.info['XBinning']
# Intensity and background by looking at fringes
I = laser_spectra.max() - laser_spectra.min()
B = laser_spectra.min()
laser_params = Parameters()
laser_params.add('n', value = 1.0, vary = False)
laser_params.add('t', value = None, vary = False)
laser_params.add('lam', value = lam_laser, vary = False)
laser_params.add('R', value = 0.5, vary = False)
laser_params.add('alpha', value = alpha, vary = False)
laser_params.add('I', value = I, vary = False)
laser_params.add('B', value = B, vary = False)
laser_params.add('a1', value = -0.1, min=-5.0, max=5.0, vary = False)
laser_params.add('a2', value = 0.0, min=-5.0, max=5.0, vary = False)
laser_params.add('b0', value = 0.5, min=-10.0, max=10.0, vary = False)
laser_params.add('b1', value = 0.0, min=-10.0, max=10.0, vary = False)
laser_params.add('b2', value = 0.0, min=-10.0, max=10.0, vary = False)
# To find a good initial guess for "t", do a grid search over one FSR around the
# last solved-for value (or the nominal value, if this is the first image). For a
# gap t the laser fringes repeat every lam_laser/2, so searching last_t +/- lam_laser/4
# covers exactly one FSR.
# TODO: make this grid search a separate general function.
def goodness(t):
# return the correlation between the fringe with this t and the data
laser_params['t'].value = t
fringe = Laser_FringeModel(laser_params, annuli['r'])
return np.dot(fringe, laser_spectra)/(norm(fringe)*norm(laser_spectra))
t_candidates = linspace(last_t - lam_laser/4, last_t + lam_laser/4, 50)
corrvec = np.array([goodness(t) for t in t_candidates])
best_t = t_candidates[corrvec.argmax()]
laser_params['t'].value = best_t
####### Inversion of laser image ##########
# Now do least-squares fit, but in stages, varying only certain parameters at a time, according to:
order = [
['alpha'],\
['t','alpha'],\
['B','I'],\
['R'],\
['t','alpha','B','I','R','a1','a2'], \
]
if ESTIMATE_BLUR:
order.append(['t','alpha','B','I','R','a1','a2','b0','b1','b2'])
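# (Note on the staged schedule above: it presumably helps the nonlinear least-squares
#  fit converge by pinning down the geometry ('alpha', then 't') first, then the
#  intensity scaling ('I', 'B'), then reflectivity 'R' and the falloff terms, before
#  freeing everything in the final joint step.)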
for group in order: # set all the params in this group to "vary=True", and then run inversion
for param in laser_params.keys():
if param in group:
laser_params[param].vary = True
else:
laser_params[param].vary = False
# If this instrument only produces a couple of fringes, don't vary the higher-order falloff terms
if not instrument['many_fringes']:
for param in ['a2', 'b1', 'b2']:
laser_params[param].vary=False
laser_params[param].value=0.0
laser_fit = Minimizer(Laser_Residual,laser_params, \
fcn_args=(annuli['r'][N0:N1],), fcn_kws={'data': laser_spectra[N0:N1], 'sigma': laser_sigma[N0:N1]}, \
scale_covar = True)
laser_fit.leastsq()
#if (laser_fit.redchi/laser_fit.params['I'].value < 1.): # TODO: try to find a better way to do this
if True: # TODO: just keep this?
# This is a valid image, store time
laser_times.append(local.localize(d.info['LocalTime']))
# Save the results
output.append(laser_fit)
last_chi = laser_fit.redchi
last_t = best_t
laser_redchi.append(laser_fit.redchi)
#logfile.write(datetime.datetime.now().strftime('%m/%d/%Y %H:%M:%S %p: ') + \
# '%s reduced chisqr: %.4f\n' % (fname, laser_fit.redchi))
print fname # TODO: DELETE ME
print laser_fit.message
# Check if any laser images were actually analyzed.
if len(output)==0:
raise Exception('%s: No usable laser images found.' % data_dir)
[Output truncated: one line per laser image, e.g. "/rdata/airglow/fpi/minime05/uao/2014/20141129/UAO_L_20141129_232125_003.img Tolerance seems to be too small.", repeated for each file analyzed]
######### Spline fit to the parameters ###########
# Compute elapsed seconds since the first laser observation (required for the spline fit)
dt = []
for x in laser_times:
diff = (x - laser_times[0])
dt.append(diff.seconds+diff.days*86400.)
dt = array(dt)
# Spline fit to the parameters.
# (This will work for both the fitted and constant params)
for param in laser_fit.params:
# Grab the estimates and the associated (normalized) uncertainty
p = []
ep = []
for o in output:
p.append(o.params[param].value)
ep.append(o.params[param].stderr)
p = np.array(p)
ep = np.array(ep)
m = len(ep)
s = m # Expected value of chi^2
if param == 't': # force 't' to fit more smoothly
# i.e., we don't trust those small error bars, so make them bigger
ep = ep * 3 # TODO: optimal value? 1 seems too small, 10 seems too big
# set weights
if laser_fit.params[param].vary:
# use the error bars from the inversion, but where the error bars
# couldn't be calculated (non-positive stderr), substitute a very large
# error bar so those points get negligible weight.
w = [1/stderr if stderr > 0 else 1/(1e6*paramval) for paramval,stderr in zip(p,ep) ]
else:
# use arbitrary error bars (doesn't matter - it will fit exactly)
w = np.ones(shape(ep))
# Try spline, then linear, then zeroth-order interpolation.
# (In this copy the smoothing spline is commented out, so the first branch
# actually performs linear interpolation.)
try: # spline interpolation (currently linear; see note above)
#sfit = interpolate.UnivariateSpline(np.array(dt),p, w=w, s=s)
sfit = interpolate.interp1d(np.array(dt), p)
except: # Try linear.
notify_the_humans = True
try: # if there's only one laser image, then even linear won't work.
print 'Spline interpolation failed for laser param "%s". Defaulting to linear interpolation.\n' % (param)
sfit = interpolate.interp1d(np.array(dt), p)
except: # all else failed. There's probably only one laser image.
print 'Spline and linear interpolation both failed for laser param ' + \
'"%s". Defaulting to zeroth-order interpolation.\n' % (param)
sfit = lambda t: p[0] # whatever the input, just return the parameter.
laser_fit.params[param].spfit = sfit
dt = array([(laser_time - laser_times[0]).total_seconds() for laser_time in laser_times])
# Show the spline fit for a certain parameter
param = 't'
dt_vec = linspace(0, max(dt), 500)
p_vec = laser_fit.params[param].spfit(dt_vec)
p = [o.params[param].value for o in output]
pe = [o.params[param].stderr for o in output]
datet_vec = [laser_times[0] + datetime.timedelta(seconds=ts) for ts in dt_vec]
fig = figure(figsize=(4,2))
h = fig.add_subplot(111)
h.plot(datet_vec,p_vec,'k')
h.errorbar(laser_times,p,yerr=pe,fmt='k.')
h.set_xlim([laser_times[0] - datetime.timedelta(hours=0.5), laser_times[-1] + datetime.timedelta(hours=0.5)])
h.xaxis.set_major_formatter(dates.DateFormatter('%H'))
h.set_ylabel('Estimated Etalon Gap [m]')
h.set_xlabel('Universal Time')
ti = h.set_title(site['Abbreviation'] + ':' + laser_times[0].strftime(' %d %b, %Y %H:%M LT') +\
' - ' + laser_times[-1].strftime('%H:%M LT') )
ti.set_y(1.09)
The calibration error is much smaller now.
my_dt_vec = linspace(0,3600*12,1000) # number of seconds since first laser image
wind_calib_error = zeros(len(my_dt_vec))
for i in range(len(my_dt_vec)):
wind_calib_error[i] = calc_wind_calib_error(my_dt_vec[i])
t = [laser_times[0] + datetime.timedelta(seconds=my_dt) for my_dt in my_dt_vec]
figure(figsize=(4,4))
subplot(2,1,1)
param = 't'
dt_vec = linspace(0, max(my_dt_vec), 500)
p_vec = laser_fit.params[param].spfit(dt_vec)
p = [o.params[param].value for o in output]
pe = [o.params[param].stderr for o in output]
datet_vec = [laser_times[0] + datetime.timedelta(seconds=ts) for ts in dt_vec]
plot(datet_vec,p_vec,'k')
errorbar(laser_times,p,yerr=pe,fmt='k.')
xlim([laser_times[0] - datetime.timedelta(hours=0.5), laser_times[-1] + datetime.timedelta(hours=0.5)])
gca().xaxis.set_major_formatter(dates.DateFormatter('%H'))
ylabel('Etalon Gap [m]')
ti = h.set_title(site['Abbreviation'] + ':' + laser_times[0].strftime(' %d %b, %Y %H:%M LT') + ' - ' + laser_times[-1].strftime('%H:%M LT') )
ti.set_y(1.09)
subplot(2,1,2)
plot(t,wind_calib_error,'k')
gca().xaxis.set_major_formatter(dates.DateFormatter('%H'))
xlabel('UT')
ylabel('Wind Calibration\nError [m/s]')
xlim([laser_times[0] - datetime.timedelta(hours=0.5), laser_times[-1] + datetime.timedelta(hours=0.5)])
tight_layout()
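As a rough cross-check (not part of FPIprocess.py), the size of this error can be related directly to how well the etalon gap is known: for a fixed fringe order the fitted wavelength scales with the gap, so a fractional gap error of delta_t/t maps to a Doppler-velocity error of roughly c*delta_t/t. The values below are assumed for illustration only.
# Back-of-the-envelope estimate with assumed values (not taken from the data above)
c_light = 299792458.   # speed of light [m/s]
t_nom = 0.015          # assumed nominal etalon gap [m]
delta_t = 1e-9         # assumed gap misknowledge [m]
print('Wind error for a %.1e m gap error: ~%.0f m/s' % (delta_t, c_light*delta_t/t_nom))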
def get_moon_angle(t, lat, lon, az, ze):
'''
Calculate the great-circle angle between the direction of the moon
and the direction given by (az,ze), for the datetime t and location (lat,lon)
'''
obs = ephem.Observer()
obs.lat = str(lat)
obs.lon = str(lon)
obs.date = t.astimezone(pytz.utc)
moon = ephem.Moon(obs)
moonAz = moon.az.real
moonZe = math.pi/2 - moon.alt.real
# Code copied from original Master scripts for FPI observations
a = math.cos(az*math.pi/180)*math.sin(ze*math.pi/180)
b = math.sin(az*math.pi/180)*math.sin(ze*math.pi/180)
aMoon = math.cos(moonAz)*math.sin(moonZe)
bMoon = math.sin(moonAz)*math.sin(moonZe)
moonAngle = math.acos(a*aMoon + b*bMoon + math.cos(ze*math.pi/180) * math.cos(moonZe))
return moonAngle*180./pi
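# Hypothetical usage of get_moon_angle (the coordinates and pointing below are made
# up for illustration; they are not read from the data):
import ephem, math  # used inside get_moon_angle; harmless if already imported above
example_t = local.localize(datetime.datetime(2014, 11, 30, 3, 0, 0))
print('Moon angle: %.1f deg' % get_moon_angle(example_t, 40.2, -88.2, 0.0, 45.0))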
############# Invert the sky images ###############
# Grab all sky images. Don't use sky images when sun is up,
# or before (after) the first (last) laser image, or if
# the moon is too close to the line of sight.
sky = []
for fn in sky_all[:]:
d = ReadIMG(fn)
dt = local.localize(d.info['LocalTime'])
sundown = dt > sunset and dt < sunrise
laserstraddle = dt > laser_times[0] and dt < laser_times[-1]
# calculate moon angle
moonangle = get_moon_angle(dt, site['Location'][0], site['Location'][1], d.info['azAngle'], d.info['zeAngle'])
if not sundown: # Write to log and don't use this image
#logfile.write(datetime.datetime.now().strftime('%m/%d/%Y %H:%M:%S %p: ') + \
#'Sun up during %s. Ignoring this image...\n' % fn )
print('Sun up during %s. Ignoring this image...\n' % fn )
elif not laserstraddle: # Write to log and don't use this image
#logfile.write(datetime.datetime.now().strftime('%m/%d/%Y %H:%M:%S %p: ') + \
#'%s not bounded by laser images. Ignoring this image...\n' % fn )
print('%s not bounded by laser images. Ignoring this image...\n' % fn )
elif moonangle < MOON_THRESH:
print('%s is looking too close to the moon (%.1f deg). Ignoring this image...\n' % (fn,moonangle))
######################## HACK TO ONLY ANALYZE ZENITH MEASUREMENTS ##############################
elif abs(d.info['zeAngle']) > 0.5:
pass
################################################################################################
else:
sky.append(fn)
if len(sky) == 0:
raise Exception('%s: No usable sky images found.' % data_dir)
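# Quick summary of the selection above (not part of FPIprocess.py; purely informational)
print('%d of %d candidate sky images kept after the sun/laser/moon/zenith cuts' % (len(sky), len(sky_all)))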
# Constants
c = 299792458. # speed of light [m/s]
k = 1.3806503e-23 # Boltzmann constant [J/K]
m = 16/6.0221367e26 # mass of atomic oxygen [kg] (16 amu / Avogadro's number)
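# (For orientation: these constants presumably set the thermal Doppler width of the
#  emission line in the sky forward model. A minimal check, assuming the 630.0 nm
#  redline, gives a 1/e half-width of a few picometres at thermospheric temperatures.)
lam0_example = 630.0e-9                               # assumed wavelength [m]
T_example = 1000.0                                    # assumed temperature [K]
dlam_D = lam0_example/c * np.sqrt(2*k*T_example/m)    # 1/e Doppler half-width [m]
print('Doppler 1/e half-width at %.0f K: %.2e m' % (T_example, dlam_D))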
# Initialize data holder
sky_times = []
az = []
ze = []
LOSwind = []
sigma_LOSwind = []
calerror_LOSwind = []
T = []
sigma_T = []
direction = []
sky_redchi = []
skyI = []
skyB = []
ccdB = []
sigma_skyI = []
sigma_skyB = []
sigma_ccdB = []
sky_out = []
sky_intT = []
sky_temperature = []
# Categorize the look directions in the directory
valid_az = array([site['Directions'][direc]['az'] for direc in site['Directions']])
valid_ze = array([site['Directions'][direc]['ze'] for direc in site['Directions']])
direc_names = site['Directions'].keys()
all_az = []
all_ze = []
for fname in sky:
d = ReadIMG(fname)
all_az.append(d.info['azAngle'])
all_ze.append(d.info['zeAngle'])
#print('%s: %.1f, %.1f' % (fname, d.info['azAngle'], d.info['zeAngle']))
direc_idx, dists = sort_look_directions(valid_az, valid_ze, all_az, all_ze, direc_tol)
lam0 = instrument['lam0']
last_lamc = lam0 # nominal value of line center
lamc_start_set = False # Turns true when a reasonable search interval center is found
my_params = dict()
for i, fname in enumerate(sky):
# Read in the sky image
d = ReadIMG(fname)
img = np.asarray(d)
# Calculate the annuli to use for this time
dt = (local.localize(d.info['LocalTime'])-lt0).seconds
annuli = FindEqualAreas(img,cx(dt),cy(dt),N)
# Perform the annular summation
sky_spectra, sky_sigma = AnnularSum(img,annuli,0)
# See if there are any NaNs in the spectra. If so, skip this one.
if np.isnan(sky_spectra).sum() == 0:
# Append the time of the sky image
sky_times.append(local.localize(d.info['LocalTime']))
sky_intT.append(d.info['ExposureTime'])
sky_temperature.append(d.info['CCDTemperature'])
# Record the look direction.
# For the record, use "human-friendly" azimuth and zenith:
temp_az = d.info['azAngle']
temp_ze = d.info['zeAngle']
if temp_ze < 0.0:
temp_ze = abs(temp_ze)
temp_az = np.mod(temp_az+180.0,360)
az.append(temp_az)
ze.append(temp_ze)
# Change direction index into string
idx_match = direc_idx[i] # This is a list like [1] or [1,3] or []
# How many matches were there? Do something different for each case.
direcstr = ''
if len(idx_match)==0: # No match was found
direcstr = 'Unknown'
#logfile.write(datetime.datetime.now().strftime('%m/%d/%Y %H:%M:%S %p: ') + \
#'%s has a look direction that is not recognized: Az=%.1f, Ze=%.1f.\n' % (fname, temp_az, temp_ze))
print('%s has a look direction that is not recognized: Az=%.1f, Ze=%.1f.\n' % (fname, temp_az, temp_ze))
elif len(idx_match)==1: # unique recognized look direction
direcstr = direc_names[idx_match[0]]
else: # more than one match was found
# If "Zenith" is one of them, use it, otherwise, use any of them.
# Write a warning to the log.
direcstr_list = [direc_names[j] for j in idx_match]
if 'Zenith' in direcstr_list:
direcstr = 'Zenith'
else:
direcstr = direcstr_list[0]
#logfile.write(datetime.datetime.now().strftime('%m/%d/%Y %H:%M:%S %p: ') + \
# '%s has a look direction with > 1 match: Az=%.1f, Ze=%.1f. ' + \
# 'Matches: %s. Choosing: %s\n' % \
# (fname, temp_az, temp_ze, str(direcstr_list), direcstr))
print('%s has a look direction with > 1 match: Az=%.1f, Ze=%.1f. Matches: %s. Choosing: "%s"\n' %\
(fname, temp_az, temp_ze, str(direcstr_list), direcstr) )
direction.append(direcstr)
# Elapsed seconds between this sky image and the first laser image (for spline evaluation)
diff = sky_times[-1] - laser_times[0]
my_dt = diff.seconds+diff.days*86400.
# Set the fitting parameter guesses
sky_params = Parameters()
# Use the spline-interpolated values for the instrument function at this time
for param in laser_fit.params:
sky_params.add(param, value = laser_fit.params[param].spfit(my_dt), vary = False)
# But change the sky wavelength to lam0
sky_params['lam'].value = lam0
# Set up the forward model
L = 301
A_1D, lamvec = get_conv_matrix_1D(sky_params, annuli['r'][N0:N1], L, lam0)
# Come up with good initial guesses for the sky parameters
skyI_guess = sky_spectra[N0:N1].max() - sky_spectra[N0:N1].min()
skyB_guess = 0.0
ccdB_guess = sky_spectra[N0:N1].min()
sky_params.add('lamc', value = lam0, vary = True ) # We'll do a grid search to find a good value for lamc
sky_params.add('T', value = 1000, vary = True , min = 0.1, max = 5000.0)
sky_params.add('skyI', value = skyI_guess, vary = True , min = 0.0)
sky_params.add('skyB', value = skyB_guess, vary = True )
sky_params.add('skym', value = 0.0, vary = False) # Don't try to estimate skym (or should we?)
sky_params.add('ccdB', value = ccdB_guess, vary = True )
sky_params.add('lam0', value = lam0, vary = False) # This is determined by chemistry so it can't change.
# Do a grid search to find a good starting value for lamc.
# The instrument bias might mean that lam0 is a really bad value to start with.
# Search over one FSR around the last solved-for value (or the nominal value, if it's the first run).
# It's not really important to sort by direction, since the instrument bias dominates.
def goodness(lamc):
# return the correlation between the fringe with this lamc and the data
sky_params['lamc'].value = lamc
fringe = Sky_FringeModel(sky_params, annuli['r'][N0:N1], lamvec, A_1D)
return np.dot(fringe, sky_spectra[N0:N1])/(norm(fringe)*norm(sky_spectra[N0:N1]))
FSR = lam0**2/(2*sky_params['t'].value)
lamc_candidates = linspace(last_lamc - FSR/2, last_lamc + FSR/2, 50)
corrvec = np.array([goodness(lamc) for lamc in lamc_candidates])
best_lamc = lamc_candidates[corrvec.argmax()]
sky_params['lamc'].value = best_lamc
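# (For scale: with lam0 ~ 630 nm and a gap of ~1.5 cm -- typical values, assumed
#  here only for illustration -- the FSR computed above is ~1.3e-11 m, so the
#  +/- FSR/2 grid search spans roughly +/- 3 km/s in Doppler velocity, far wider
#  than any geophysical wind.)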
# Make a copy for future debugging
sky_params_init = Parameters()
for p in sky_params:
sky_params_init.add(p, value = sky_params[p].value)
# Take the inversion in steps
order = [
['skyI'],\
['skyI','ccdB'],\
['skyI', 'ccdB', 'skyB'], \
['skyI', 'ccdB', 'skyB', 'lamc', 'T'], \
]
for group in order: # set all the params in this group to "vary=True", and then run inversion
for param in sky_params.keys():
if param in group:
sky_params[param].vary = True
else:
sky_params[param].vary = False
sky_fit = Minimizer(Sky_Residual,sky_params,fcn_args=(annuli['r'][N0:N1],lamvec,A_1D), \
fcn_kws={'data': sky_spectra[N0:N1], 'sigma': sky_sigma[N0:N1]}, scale_covar=True)
sky_fit.prepare_fit()
sky_fit.leastsq()
# if the inversion failed, replace values with nans and set error bars to inf
if not sky_fit.success or not sky_fit.errorbars:
for p in sky_fit.params:
sky_fit.params[p].value = np.nan
sky_fit.params[p].stderr = np.inf
sky_redchi.append(sky_fit.redchi)
sky_out.append(sky_fit)
# Calculate wind calibration error
calerror_LOSwind.append(calc_wind_calib_error(my_dt))
# Transform from lamc to v and collect all parameters
lamc = sky_params['lamc'].value
sigma_lamc = sky_params['lamc'].stderr
v = c * (lamc/lam0 - 1)
sigma_v = c * sigma_lamc / lam0
LOSwind.append(v) # Doppler line-of-sight velocity (no negative sign applied in this copy)
sigma_LOSwind.append(sigma_v)
T.append(sky_fit.params['T'].value)
sigma_T.append(sky_fit.params['T'].stderr)
skyI.append(sky_params['skyI'].value)
sigma_skyI.append(sky_params['skyI'].stderr)
skyB.append(sky_params['skyB'].value)
sigma_skyB.append(sky_params['skyB'].stderr)
#logfile.write(datetime.datetime.now().strftime('%m/%d/%Y %H:%M:%S %p: ') + '%s reduced chisqr: %.4f\n' % (fname, sky_fit.redchi))
print('%s reduced chisqr: %.4f. Message:"%s"' % (fname, sky_fit.redchi, sky_fit.message))
# use the lamc value to center the next grid search, if the error bars are small enough
if sigma_v < SIGMA_V_THRESH and sigma_v > 0 and not lamc_start_set: # sigma_v==0 means it didn't converge
last_lamc = lamc # save for next time
lamc_start_set = True
#print 'v=%0.1f\tsigma_v=%.1f\tnext_init=%.1f' % (v, sigma_v, c*(last_lamc/lam0 - 1) )
else:
#logfile.write(datetime.datetime.now().strftime('%m/%d/%Y %H:%M:%S %p: ') + '%s is invalid and not analyzed\n' % fname)
print '%s is invalid and not analyzed' % fname
if len(sky) == 0:
print '%s: No usable sky images found.' % data_dir
raise Exception('%s: No usable sky images found.' % data_dir)
# Convert sky_params to array
n = len(sky_out)
sky_value = {}
sky_stderr = {}
for p in sky_out[-1].params:
sky_value[p] = np.zeros(n)
sky_stderr[p] = np.zeros(n)
for i in range(0,n):
sky_value[p][i] = sky_out[i].params[p].value
sky_stderr[p][i] = sky_out[i].params[p].stderr
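# Quick sanity check on the fitted temperatures (illustrative; not in FPIprocess.py)
good = np.isfinite(sky_value['T']) & np.isfinite(sky_stderr['T'])
print('Median fit temperature: %.0f K (median error bar %.0f K, %d good images)' % \
(np.median(sky_value['T'][good]), np.median(sky_stderr['T'][good]), good.sum()))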
fig = plt.figure(figsize=(5,3.5))
subplot(211)
#ax.errorbar(sky_times, LOSwind, yerr=sigma_LOSwind ,fmt='k.-')
#for direc in list(set(direction)):
for direc in ['Zenith']:
v = np.array([si for (si,d) in zip(LOSwind, direction) if d == direc])
vref = mean(v)
ve = np.array([si for (si,d) in zip(sigma_LOSwind, direction) if d == direc])
t = np.array([si for (si,d) in zip(sky_times, direction) if d == direc])
errorbar(t, v-vref, yerr=ve, fmt='k.-', label=direc)
xlim([sky_times[0] - datetime.timedelta(hours=0.5), sky_times[-1] + datetime.timedelta(hours=0.5)])
gca().xaxis.set_major_formatter(dates.DateFormatter('%H'))
legend(loc='best', prop={'size':6}, numpoints=1, ncol=3);
ylabel('Vertical wind\n(fit error) [m/s]')
#xlabel('Universal Time')
ylim((-150,150))
grid()
subplot(2,1,2)
for direc in ['Zenith']:
v = np.array([si for (si,d) in zip(LOSwind, direction) if d == direc])
vref = mean(v)
ve_fit = np.array([si for (si,d) in zip(sigma_LOSwind, direction) if d == direc])
ve_cal = np.array([si for (si,d) in zip(calerror_LOSwind, direction) if d == direc])
ve_tot = sqrt(ve_fit**2 + ve_cal**2)
t = np.array([si for (si,d) in zip(sky_times, direction) if d == direc])
errorbar(t, v-vref, yerr=ve_tot, fmt='k.-', label=direc)
ylim((-150,150))
xlim([sky_times[0] - datetime.timedelta(hours=0.5), sky_times[-1] + datetime.timedelta(hours=0.5)])
gca().xaxis.set_major_formatter(dates.DateFormatter('%H'))
ylabel('Vertical wind\n(total error) [m/s]')
grid()
tight_layout()
legend(loc='best', prop={'size':6}, numpoints=1, ncol=3);
from PIL import Image # assumed import (may already be present earlier in the notebook)
im = Image.open('/home/bhardin2/ipython_figures/ANN_jumpy.png')
figure(figsize=(4,4))
imshow(im)
axis('off')
import subprocess
import shutil
nb_name = 'FPI_Gap_Misknowledge_Error.ipynb' # There's probably a way to get this programmatically
cmd = ['ipython', 'nbconvert', '--to', 'slides', nb_name, \
'--reveal-prefix', '"http://cdn.jsdelivr.net/reveal.js/2.6.2"',\
'--template','output_toggle']
# The --template command points to a custom template which opens and closes the input code cells with a click
subprocess.call(cmd)
html_name = '%s.slides.html' % (nb_name.split('.')[0])
shutil.copy(html_name, '/home/bhardin2/public_html/slideshows/')