psdlag-agn/scripts/analyze_lightcurve.py

# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import sys
import numpy as np
import getopt
sys.path.insert(1, "/home/caes/science/clag/")
import clag
# For jupyter notebook
# %pylab inline
usage = 'analyze_lightcurve.py <reference curve> <compared curve>'
try:
    opts, args = getopt.getopt(sys.argv[1:], "")
except getopt.GetoptError:
    print(usage)
    sys.exit(2)
if len(args) != 2:
    print(usage)
    sys.exit(2)
# Time resolution determined from inspection and testing. This script
# does not expect evenly spaced data in time.
dt = 0.1
### Get the psd for the first light curve ###
# These bin values were determined in summer 2016 for the STORM III
# optical/UV lightcurves.
fqL = np.array([0.0049999999, 0.018619375, 0.044733049, 0.069336227,
0.10747115, 0.16658029, 0.25819945, 0.40020915,
0.62032418])
# A general rule for fqL is as follows:
#
# Define f1, f2 as the two extreme frequencies allowed by the data, i.e.
# f1 = 1/T with T being the length of the observation in time units, and
# f2 = 0.5/Δt (the Nyquist frequency).
#
# The frequency limits where the psd/lag can be constrained are about
# ~0.9f1--0.5f2. The 0.9 factor doesn't depend on the data much, and
# values in the range ~[0.9-1.1] are ok. The factor in front of f2
# depends on the quality of the data: for low quality data, use ~0.1--
# 0.2, and for high quality data, increase it up to ~0.9--1.
#
# Always include two dummy bins at the low and high frequencies and
# ignore them. The first and last bins are generally biased, so I suggest
# using 0.5f1--0.9f1 as the first bin (or whatever value you use instead
# of 0.9f1, see the second point above), and 0.5f2--2f2 as the last bin
# (or whatever value instead of 0.5f2, see the second point above). So
# the frequency bins should be something like
# [0.5f1, 0.9f1, ..., 0.5f2, 2f2]; the bins in between can be divided as
# desired. (A sketch of this rule follows the commented example below.)
#
# fqd is the bin center.
#
# If lightcurves need to be split:
#   seg_length = 256
#   fqL = np.logspace(np.log10(1.1/seg_length), np.log10(0.5/dt), 7)
#   fqL = np.concatenate(([0.5/seg_length], fqL))
#
#f1 = 1/175.
#f2 = 0.5/dt
#fqL = np.hstack((np.array(0.5*f1),np.logspace(np.log10(0.9*f1),
# np.log10(0.3*f2),9),np.array(2*f2)))
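# A minimal sketch of the binning rule above: build fqL from the
# observation length T and sampling dt. Illustrative only and not called
# below; the helper name and defaults are assumptions, not part of clag.
def make_fqL(T, dt, nbins=9, lo_fac=0.9, hi_fac=0.5):
    f1, f2 = 1./T, 0.5/dt
    inner = np.logspace(np.log10(lo_fac*f1), np.log10(hi_fac*f2), nbins)
    # add the two dummy edge bins; ignore them when reading the results
    return np.hstack(([0.5*f1], inner, [2.*f2]))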
# Override the hand-picked bins above with evenly log-spaced bins
# spanning the same frequency range.
fqL = np.logspace(np.log10(0.0049999999), np.log10(0.62032418), 9)
nfq = len(fqL) - 1
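# bin centers: the geometric mean of adjacent bin edges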
fqd = 10**(np.log10( (fqL[:-1]*fqL[1:]) )/2.)
## load the first light curve
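# Expected file format (assumed from the loadtxt call below): one header
# row, then three whitespace-separated columns: time, flux, flux error.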
lc1_time, lc1_strength, lc1_strength_err = np.loadtxt(args[0], skiprows=1).T
# for pylab: errorbar(lc1_time, lc1_strength, yerr=lc1_strength_err, fmt='o')
## initialize the psd class for multiple light curves (used throughout) ##
P1 = clag.clag('psd10r',
               [lc1_time], [lc1_strength], [lc1_strength_err],
               dt, fqL)
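# Fit the psd: start from flat (unity) values, optimize, then refine
# the uncertainties.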
ref_psd = np.ones(nfq)
ref_psd, ref_psd_err = clag.optimize(P1, ref_psd)
ref_psd, ref_psd_err = clag.errors(P1, ref_psd, ref_psd_err)
## plot ##
#xscale('log'); ylim(-4,2)
#errorbar(fqd, ref_psd, yerr=ref_psd_err, fmt='o', ms=10)
# Load second light curve
lc2_time, lc2_strength, lc2_strength_err = np.loadtxt(args[1],skiprows=1).T
P2 = clag.clag('psd10r', [lc2_time], [lc2_strength], [lc2_strength_err], dt, fqL)
echo_psd = np.ones(nfq)
echo_psd, echo_psd_err = clag.optimize(P2, echo_psd)
echo_psd, echo_psd_err = clag.errors(P2, echo_psd, echo_psd_err)
### Now the cross spectrum ###
### We also give it the calculated psd values as input ###
Cx = clag.clag('cxd10r',
               [[lc1_time, lc2_time]],
               [[lc1_strength, lc2_strength]],
               [[lc1_strength_err, lc2_strength_err]],
               dt, fqL, ref_psd, echo_psd)
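# Starting values for the fit: the first nfq entries hold the cross
# spectrum, the remaining nfq hold the phase lags (unpacked below).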
#Cx_vals = np.concatenate( (0.3*(ref_psd*echo_psd)**0.5, ref_psd*0+1.) )
Cx_vals = np.concatenate( ((ref_psd+echo_psd)*0.5-0.3,ref_psd*0+0.1) )
Cx_vals, Cx_err = clag.optimize(Cx, Cx_vals)
Cx_vals, Cx_err = clag.errors(Cx,Cx_vals,Cx_err)
phi, phie = Cx_vals[nfq:], Cx_err[nfq:]
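# convert phase lags to time lags at the bin center frequencies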
lag, lage = phi/(2*np.pi*fqd), phie/(2*np.pi*fqd)
cross_spectrm, cross_spectrm_err = Cx_vals[:nfq], Cx_err[:nfq]
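# Write the results: the frequency bin edges, then one row of values
# and one row of errors for each quantity.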
np.savetxt("freq.out",fqL.reshape((-1,len(fqL))))
np.savetxt("ref_psd.out",[ref_psd,ref_psd_err])
np.savetxt("echo_psd.out",[echo_psd,echo_psd_err])
np.savetxt("crsspctrm.out",[cross_spectrm,cross_spectrm_err])
np.savetxt("timelag.out",[lag,lage])