mirror of
https://asciireactor.com/otho/industry-website.git
synced 2024-11-22 02:35:07 +00:00
Added old code examples.
This commit is contained in:
parent
25d53df531
commit
f3f2bd8de4
551
htdocs/examples/agn.hpp
Normal file
551
htdocs/examples/agn.hpp
Normal file
@ -0,0 +1,551 @@
|
||||
#ifndef agn_hpp
|
||||
#define agn_hpp
|
||||
|
||||
#include <iostream>
|
||||
#include <fstream>
|
||||
#include <iomanip>
|
||||
#include <string>
|
||||
#include <sstream>
|
||||
#include <cmath>
|
||||
#include <vector>
|
||||
#include <map>
|
||||
#include <list>
|
||||
#include <cctype>
|
||||
#include <cstdlib>
|
||||
#include <set>
|
||||
#include <cmath>
|
||||
|
||||
namespace agn {

// Global switches for diagnostic output from the parsing routines below.
const bool debug = false;      // coarse progress messages
const bool line_debug = false; // per-emission-line parse tracing
const bool verbose=true;

// General physical constants
const double PI=3.14159265358979323846;
const double PLANCK_CONST=4.135668e-15; // in eV * s
const double BOLTZMANN_CONST=0.00008617332385; // in eV / K
const double RYDBERG_CONST=1.0973731568539e7; // in 1 / m
const double RYDBERG_UNIT_EV=13.60569252; // in eV
const double RYDBERG_UNIT_ANGSTROM=1e10/RYDBERG_CONST; // in A

// line label header lengths based on cloudy version
// For cloudy 17, the characters reserved for the output quantities:
// Label: 0-9
// Wavelength: 10-20
// radiated energy: 21-26
// eq width: 27-36
// Cloudy 13 had: 1,13; 17,5; 22,11;
// c17: 0,18; 18,9; 27, 10

// c13 settings (kept for reference)
//int labelstr_pos = 1;
//int labelstr_len = 13;
//int radiatedenergystr_pos = 14;
//int radiatedenergystr_len = 8;
//int eqwidthstr_pos = 22;
//int eqwidthstr_len = 9;

// c17 settings: column offsets/widths of the fields in a Cloudy 17
// emission-line row.
// FIX: these were mutable `int` globals in a header, which gives them
// external linkage and causes multiple-definition link errors as soon as
// the header is included from more than one translation unit. `const`
// gives them internal linkage; they are only ever read.
const int LABELSTR_POS = 0;
const int LABELSTR_LEN = 18;
const int RADIATEDENERGYSTR_POS = 18;
const int RADIATEDENERGYSTR_LEN = 9;
const int EQWIDTHSTR_POS = 27;
const int EQWIDTHSTR_LEN = 10;

// Trims whitespace from line labels
std::string label_trim(std::string);

// Some useful containers and functions.
typedef std::map<double,double> table1d;   // x -> y
typedef table1d::iterator iterator1d;
std::string format_table1d(table1d);       // tab-separated text dump

typedef std::map<double,table1d> table2d;
typedef table2d::iterator iterator2d;
typedef std::pair<double,double> coord2d;  // (hden, phi) grid coordinate
typedef std::list<coord2d> gridcoordlist;

// Seeks an instream to the line after the first occurrence of seek_string.
void seek_to(std::string,std::istream&);

// One parsed emission line from a Cloudy output table.
struct cloudy_line_data {
	double radiated_energy,eq_width;
	int index;            // 1-based order of appearance in the table
	bool has_duplicates;  // true if the same label occurred more than once
};

// label -> parsed line data
typedef std::map<std::string,cloudy_line_data> cloudy_line_output;

// Everything captured for a single model (one grid point) of a Cloudy run.
struct cloudy_result {
	std::string header, footer;
	std::list<std::string> intrinsic_line_raw_text;
	std::list<std::string> emergent_line_raw_text; // Not currently collected
	std::list<std::string> cautions;
	cloudy_line_output emergent_line_intensity;
	cloudy_line_output intrinsic_line_intensity;
	cloudy_line_output intrinsic_line_inward_intensity;
	int iterations;
	double phi,hden,colden; // model parameters (log values, per the output)
	cloudy_result():
		header(""),
		footer(""),
		intrinsic_line_raw_text(),
		cautions(),
		iterations(0),
		phi(0),
		hden(0),
		colden(0)
	{}
};

// (hden, phi) -> parsed model result
typedef std::map<coord2d,cloudy_result> cloudy_grid;

// Easiest to read the entire grid from the file at once.
cloudy_grid read_cloudy_grid(std::ifstream&);

// Operator<< prints general info about the run result.
std::ostream& operator<< (std::ostream&, cloudy_result);

} // end namespace agn
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
// Definitions
|
||||
|
||||
|
||||
// Reformats a raw Cloudy line label: the 4-character species field,
// a single space, the 8-character field ending at LABELSTR_LEN (the
// wavelength), then any text following the fixed-width label region.
//
// FIX: the original indexed label[LABELSTR_LEN-8 .. LABELSTR_LEN-1]
// unconditionally, which is undefined behavior for labels shorter than
// LABELSTR_LEN; such labels are now returned unchanged. The char-by-char
// appends are replaced by range appends with identical output.
std::string agn::label_trim(std::string label) {
	if (label.length() < static_cast<std::string::size_type>(agn::LABELSTR_LEN))
		return label; // too short to contain the fixed-width fields
	std::string trimmed;
	trimmed.reserve(label.length());
	trimmed.append(label, 0, 4);                       // species, chars 0-3
	trimmed.push_back(' ');
	trimmed.append(label, agn::LABELSTR_LEN - 8, 8);   // wavelength field
	trimmed.append(label, agn::LABELSTR_LEN,
	               std::string::npos);                 // trailing text, if any
	return trimmed;
}
|
||||
|
||||
|
||||
|
||||
// Advances `stream` to the line following the first line that contains
// seek_string. On return the stream is positioned after that line.
//
// FIX: the original looped forever when seek_string never appeared —
// at EOF getline() fails and leaves test_str unchanged, so the loop
// never exited. We now stop as soon as a read fails.
void agn::seek_to(std::string seek_string,std::istream& stream) {
	std::string test_str="";
	while (test_str.find(seek_string) == std::string::npos) {
		if (!getline(stream,test_str))
			break; // EOF or stream error: target not found
	}
}
|
||||
|
||||
|
||||
|
||||
// Render a 1-D table as text: one "x<TAB>y" row per entry, values in
// scientific notation with five digits of precision.
std::string agn::format_table1d(agn::table1d table) {
	std::stringstream rendered;
	rendered << std::scientific << std::setprecision(5);
	for (agn::table1d::iterator entry = table.begin();
	     entry != table.end(); ++entry) {
		rendered << entry->first << "\t" << entry->second << "\n";
	}
	return rendered.str();
}
|
||||
|
||||
// One-line summary of a model result:
//   "<hden> x <phi> (colden <colden>): <N> intrinsic emission lines,
//    <M> cautions present."
std::ostream& agn::operator<< (std::ostream& outstream, cloudy_result output) {
	outstream.width(5); // applies to the first field (hden) only
	outstream << std::fixed << std::setprecision(2)
	          << output.hden
	          << " x "
	          << output.phi;
	outstream << " (colden "
	          << std::fixed << std::setprecision(0)
	          << output.colden
	          << "): ";
	outstream << output.intrinsic_line_intensity.size()
	          << " intrinsic emission lines, "
	          << output.cautions.size()
	          << " cautions present.";
	return outstream;
}
|
||||
|
||||
// Stream-extraction convenience so callers can write `file >> grid;`.
// Delegates to agn::read_cloudy_grid (which rewinds and parses the whole
// file) and returns the stream for chaining.
std::ifstream& operator>> (std::ifstream& inputfile,agn::cloudy_grid& grid) {
	grid = agn::read_cloudy_grid(inputfile);
	return inputfile;
}
|
||||
|
||||
|
||||
|
||||
|
||||
// Parses an entire Cloudy grid-run output file into a cloudy_grid:
// first collects the (hden, phi) coordinate list from the grid preamble,
// then, for each grid point in order, captures the model header, cautions,
// iteration count, column density, and the intrinsic emission-line table.
// The stream is rewound before parsing, so it may be called on a stream
// that has already been read.
agn::cloudy_grid agn::read_cloudy_grid(std::ifstream& inputfile) {
	if(agn::debug) std::cout
		<< "Constructing cloudy output grid from file.\n";
	// Rewind and clear any prior EOF/fail state.
	inputfile.clear();
	inputfile.seekg(0);
	std::string inputline;
	int line_num=0;
	getline(inputfile,inputline);
	std::string curstr = inputline.substr(0,23);
	std::string seek_string = "Producing grid output.";
	// NOTE(review): `pos` is declared int but find() returns size_t; the
	// comparison against npos below only works via the -1 round trip.
	int pos = curstr.find(seek_string);
	// Scan at most 1000 lines for the banner marking the grid preamble.
	while(line_num < 1000) {
		line_num++;
		getline(inputfile,inputline);
		curstr = inputline.substr(0,30);
		pos = inputline.find(seek_string);
		if (pos != std::string::npos) break;
	}
	if(agn::debug) std::cout
		<< "Collecting grid meta data starting on line "
		<< line_num
		<< std::endl;
	agn::gridcoordlist coordlist;
	std::pair<double,double> xy (0,0);
	// collect parameter coordinates: each model in the preamble is
	// introduced by a row of asterisks followed by its two parameters.
	seek_string="**************************************************";
	while(!inputfile.eof()) {
		while (inputline.find(seek_string) == std::string::npos)
			getline(inputfile,inputline);
		while(inputfile.peek() == ' ') inputfile.get(); // Skip blank space
		// Break for first grid
		if (inputline.find("GRID_DELIMIT") != std::string::npos) break;
		char nextchar = inputfile.peek();
		//bool hascoords = nextchar != '*' && nextchar != ' ' && nextchar != '\n';
		bool hascoords = isalnum(nextchar);
		if (hascoords) {
			//if(agn::debug) std::cout << "Has coords.\n";
			// Skip to the numeric fields and read the coordinate pair.
			while (!isdigit(inputfile.peek())) inputfile.get();
			inputfile >> xy.first;
			while (!isdigit(inputfile.peek())) inputfile.get();
			inputfile >> xy.second;
			std::pair<double,double> newentry (xy);
			coordlist.push_back(newentry);
			getline(inputfile,inputline);
		}
		else {
			// No coordinates after this delimiter; skip to the next one.
			getline(inputfile,inputline);
			while (inputline.find(seek_string) == std::string::npos)
				getline(inputfile,inputline);
		}
	}
	if(agn::debug) std::cout
		<< "Reached first grid. "
		<< coordlist.size()
		<< " coordinate pairs found.\n";
	agn::cloudy_grid grid;
	agn::gridcoordlist::iterator coords = coordlist.begin();

	// One pass per grid point, in the same order as the coordinate list.
	while(grid.size() < coordlist.size()) {
		bool failmode = false;
		agn::cloudy_result point;
		point.hden = coords->first;
		point.phi = coords->second;
		seek_string = "Intrinsic line intensities";
		std::string fail_string = "PROBLEM DISASTER";
		if(agn::debug) std::cout
			<< "Grabbing header and cautions.";
		// Accumulate the model header up to the line-intensity table;
		// caution/warning lines ("C-" or " !") are also collected.
		std::string header="";
		std::list<std::string> cautions;
		while (inputline.find(seek_string) == std::string::npos) {
			getline(inputfile,inputline);
			if (inputline.find(fail_string) != std::string::npos) {
				if(agn::debug) std::cout
					<< " Found broken model.";
				failmode = true;
				break;
			}
			header.append(inputline);
			header.append("\n");
			if((inputline[1] == 'C' && inputline[2] == '-')
				|| (inputline[1] == ' ' && inputline[2] == '!' ))
				cautions.push_back(inputline);
		}
		if (failmode) {
			// Store the (mostly empty) point so the grid stays aligned
			// with the coordinate list, then move on.
			grid[*coords] = point;
			coords++;
			//agn::seek_to("c ======================",inputfile);
			//agn::seek_to("c ======================",inputfile);
			continue;
		}
		point.header = header;
		point.cautions = cautions;
		// Re-scan the header text for the iteration count.
		std::stringstream headerstr;
		headerstr << header;
		std::string headerword="";
		headerstr.clear();
		headerstr.seekg(0);
		// NOTE(review): left uninitialized; stays indeterminate if the
		// header never contains "Iteration" — confirm against real output.
		int iterations;
		while(!headerstr.eof()) {
			headerstr >> headerword;
			//std::cout << "looking for iterations in " << headerword << "\n";
			if(headerword == "Iteration") {
				headerstr >> iterations;
				break;
			}
		}
		if(agn::debug) std::cout
			<< " Found " << iterations << " iterations.";
		point.iterations = iterations;
		// Second scan of the header for "column density <value>".
		headerstr.clear();
		headerstr.seekg(0);
		// NOTE(review): also uninitialized if the phrase is absent.
		double colden;
		while(!headerstr.eof()) {
			headerstr >> headerword;
			//std::cout << "checking "<<headerword<<"\n";
			if(headerword == "column") {
				headerstr >> headerword;
				//std::cout << "found "<<headerword;
				if(headerword == "density") {
					headerstr >> colden;
					break;
				}
			}
		}
		if(agn::debug) std::cout << std::setprecision(2)
			<< " Found 10^"<<colden<<" colden.";
		point.colden = colden;
		if(agn::debug) std::cout
			<< " Grabbing emission lines.";
		// The emission-line table follows the "general properties" banner;
		// it ends at the first empty line. Separator rows containing ".."
		// are skipped.
		agn::seek_to("general properties...................",inputfile);
		std::list<std::string> intrinsic_line_raw_text;
		getline(inputfile,inputline);
		while (inputline != "") {
			if(inputline.find("..") != std::string::npos) {
				getline(inputfile,inputline);
				continue;
			}
			intrinsic_line_raw_text.push_back(inputline);
			getline(inputfile,inputline);
		}
		point.intrinsic_line_raw_text.swap(intrinsic_line_raw_text);
		std::list<std::string>::iterator linetext_it=point.intrinsic_line_raw_text.begin();
		std::list<std::string> duplicate_labels;
		int index=0;
		std::string label;
		// For cloudy 17, the characters reserved for the output quantities:
		// Label: 0-9
		// Wavelength: 10-20
		// radiated energy: 21-26
		// eq width: 27-36
		// Cloudy 13 had: 1,13; 17,5; 22,11;
		// c17: 0,18; 18,9; 27, 10

		// c13 settings
		//int labelstr_pos = 1;
		//int labelstr_len = 13;
		//int radiatedenergystr_pos = 14;
		//int radiatedenergystr_len = 8;
		//int eqwidthstr_pos = 22;
		//int eqwidthstr_len = 9;

		// c17 settings (local copies of the header-level constants)
		int labelstr_pos = 0;
		int labelstr_len = 18;
		int radiatedenergystr_pos = 18;
		int radiatedenergystr_len = 9;
		int eqwidthstr_pos = 27;
		int eqwidthstr_len = 10;

		// Parse each raw table row into cloudy_line_data records.
		while(linetext_it != point.intrinsic_line_raw_text.end()) {
			// This section is also only written for the "final case" as alerted below
			// "Inwd" rows give the inward component of the *previous*
			// line, so they are filed under the last-seen `label`.
			if ((*linetext_it).substr(labelstr_pos,4) == "Inwd") {
				agn::cloudy_line_data inwddata;
				inwddata.index = ++index;
				inwddata.radiated_energy = atof((*linetext_it).substr(
					radiatedenergystr_pos,
					radiatedenergystr_len).c_str()
				);
				inwddata.eq_width = atof((*linetext_it).substr(
					eqwidthstr_pos,
					eqwidthstr_len).c_str()
				);
				if(line_debug) {
					std::cout
						<< std::setprecision(5)
						<< "Inwd "
						<< (*linetext_it).substr(5)
						<< ": "
						<< label
						<< "; "
						<< inwddata.radiated_energy
						<< " from "
						<< (*linetext_it).substr(
							radiatedenergystr_pos,
							radiatedenergystr_len).c_str()
						<< "; "
						<< inwddata.eq_width
						<< " from "
						<< (*linetext_it).substr(
							eqwidthstr_pos,
							eqwidthstr_len).c_str()
						<< "\n";
				}
				point.intrinsic_line_inward_intensity[label] = inwddata;
				linetext_it++;
				continue;
			}

			// Ordinary line row: extract the fixed-width label and fields.
			label=(*linetext_it).substr(
				labelstr_pos,
				labelstr_len
			);
			agn::cloudy_line_data data;
			data.index = ++index;
			//std::stringstream values;
			data.radiated_energy = atof((*linetext_it).substr(
				radiatedenergystr_pos,
				radiatedenergystr_len).c_str()
			);
			data.eq_width = atof((*linetext_it).substr(
				eqwidthstr_pos,
				eqwidthstr_len).c_str()
			);
			if(line_debug) {
				std::cout
					<< std::setprecision(5)
					<< *linetext_it
					<< ": "
					<< label
					<< "; "
					<< data.radiated_energy
					<< " from "
					<< (*linetext_it).substr(
						radiatedenergystr_pos,
						radiatedenergystr_len).c_str()
					<< "; "
					<< data.eq_width
					<< " from "
					<< (*linetext_it).substr(
						eqwidthstr_pos,
						eqwidthstr_len).c_str()
					<< "\n";
			}
			if(point.intrinsic_line_intensity.count(label) == 0) {
				// First occurrence of this label.
				data.has_duplicates = false;
				point.intrinsic_line_intensity[label] = data;
			}
			else {
				// Duplicate label: rename the stored copy to "<label> j=0"
				// and file subsequent occurrences as j=1, j=2, ...
				duplicate_labels.push_back(label);
				std::stringstream zerolabel(label);
				zerolabel << " j=0";
				point.intrinsic_line_intensity[zerolabel.str()]
					= point.intrinsic_line_intensity[label];
				point.intrinsic_line_intensity[zerolabel.str()].has_duplicates = true;
				point.intrinsic_line_intensity.erase(label);
				int j=1;
				data.has_duplicates = true;
				while(true) {
					std::stringstream newlabel;
					newlabel << label;
					newlabel << " j=" << j;
					if(point.intrinsic_line_intensity.count(newlabel.str()) != 0) {
						j++;
						// RED ALERT
						// To ease work in summer 2017, this line sets the
						// final instance of the emission line value
						// to the original label, this was to get around
						// the 3645.00A and 3647.00A being reported with
						// different values in c17 version 1
						point.intrinsic_line_intensity[label] = data;
						continue;
					}
					else {
						point.intrinsic_line_intensity[newlabel.str()] = data;
						break;
					}
				}
			}
			linetext_it++;
		}
		if(agn::debug) std::cout << " Duplicates found: " << duplicate_labels.size();
		if(agn::line_debug) {
			std::list<std::string>::iterator dup_it = duplicate_labels.begin();
			while (dup_it != duplicate_labels.end() ) {
				std::cout << std::endl;
				std::cout << *dup_it;
				dup_it++;
			}
			std::cout << " Grabbing footer.";
		}
		// Skip blank lines, then accumulate everything up to the
		// GRID_DELIMIT marker as the footer.
		// NOTE(review): `footer` is built but never assigned to
		// point.footer — looks like an oversight; confirm before relying
		// on cloudy_result::footer being populated.
		while (inputline == "")
			getline(inputfile,inputline);
		std::string footer="";
		seek_string="GRID_DELIMIT";
		while (inputline.find(seek_string) == std::string::npos){
			footer.append(inputline);
			footer.append("\n");
			getline(inputfile,inputline);
		}
		if(agn::debug) std::cout
			<< "\nAdding point to grid ("
			<< grid.size()
			<< " of "
			<< coordlist.size()
			<< ") at "
			<< coords->first
			<< ": "
			<< point
			<< std::endl;
		grid[*coords] = point;
		coords++;
	}

	if(agn::debug) std::cout
		<< "Grid captured. "
		<< grid.size()
		<< " points compiled.\n";
	return grid;
}
|
||||
|
||||
|
||||
|
||||
#endif
|
125
htdocs/examples/analyze_lightcurve.py
Normal file
125
htdocs/examples/analyze_lightcurve.py
Normal file
@ -0,0 +1,125 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
import sys
|
||||
import numpy as np
|
||||
import getopt
|
||||
sys.path.insert(1, "/home/caes/science/clag/")
|
||||
import clag
|
||||
|
||||
|
||||
# For jupyter notebook
|
||||
# %pylab inline
|
||||
|
||||
try:
|
||||
opts, args = getopt.getopt(sys.argv[1:], "")
|
||||
except getopt.GetoptError:
|
||||
print 'analyze_lightcure.py <reference curve> <compared curve>'
|
||||
sys.exit(2)
|
||||
|
||||
# Time resolution determined from inspection and testing. This script
|
||||
# does not expect evenly spaced data in time.
|
||||
dt = 0.1
|
||||
|
||||
### Get the psd for the #fqL = np.hstack((np.array(0.5*f1),np.logspace(np.log10(0.9*f1),
|
||||
# first light curve ###
|
||||
|
||||
|
||||
# These bin values determined summer 2016 for STORM III optical/UV lightcurves
|
||||
fqL = np.array([0.0049999999, 0.018619375, 0.044733049, 0.069336227,
|
||||
0.10747115, 0.16658029, 0.25819945, 0.40020915,
|
||||
0.62032418])
|
||||
|
||||
#A general rules for fqL is as follows:
|
||||
#
|
||||
# define f1, f2 as the two extreme frequencies allowed by the data. i.e.
|
||||
# f1=1/T with T being the length of observation in time units, and
|
||||
# f2=0.5/Δt
|
||||
#
|
||||
# The frequency limits where the psd/lag can be constrained are about
|
||||
# ~0.9f1−0.5f2. The 0.9 factor doesn't depend on the data much, but
|
||||
# values in the range ~[0.9-1.1] are ok. The factor in front of f2
|
||||
# depends on the quality of the data, for low qualily data, use ~0.1--
|
||||
# 0.2, and for high quality data, increase it up to 0.9−−1.
|
||||
#
|
||||
# Always include two dummy bins at the low and high frequencies and
|
||||
# ignore them. The first and last bins are generally biased, So I suggest
|
||||
# using the first bin as 0.5f1−0.9f1 (or whatever value you use instead
|
||||
# of 0.9f1, see second point above), and the last bin should be
|
||||
# 0.5f2−2∗f2 (or whatever value instead of 0.5f2, see second point
|
||||
# above). So the frequency bins should be something like:
|
||||
# [0.5f1,0.9f1,...,0.5f2,2f2], the bins in between can be devided as
|
||||
# desired.
|
||||
#
|
||||
#fqd is the bin center
|
||||
#
|
||||
# If lightcurves need to be split:
|
||||
# seg_length = 256;
|
||||
# fqL = np.logspace(np.log10(1.1/seg_length),np.log10(.5/dt),7)
|
||||
# fqL = np.concatenate(([0.5/seg_length], fqL))
|
||||
#
|
||||
|
||||
#f1 = 1/175.
|
||||
#f2 = 0.5/dt
|
||||
#fqL = np.hstack((np.array(0.5*f1),np.logspace(np.log10(0.9*f1),
|
||||
# np.log10(0.3*f2),9),np.array(2*f2)))
|
||||
fqL = np.logspace(np.log10(0.0049999999),np.log10(0.62032418),9)
|
||||
nfq = len(fqL) - 1
|
||||
fqd = 10**(np.log10( (fqL[:-1]*fqL[1:]) )/2.)
|
||||
|
||||
|
||||
## load the first light curve
|
||||
lc1_time, lc1_strength, lc1_strength_err = np.loadtxt(args[0],
|
||||
skiprows=1).T
|
||||
|
||||
# for pylab: errorbar(t1,l1,yerr=l1e,fmt='o')
|
||||
|
||||
# Used throughout
|
||||
|
||||
|
||||
## initialize the psd class for multiple light curves ##
|
||||
P1 = clag.clag('psd10r',
|
||||
[lc1_time], [lc1_strength], [lc1_strength_err],
|
||||
dt, fqL)
|
||||
ref_psd = np.ones(nfq)
|
||||
ref_psd, ref_psd_err = clag.optimize(P1, ref_psd)
|
||||
ref_psd, ref_psd_err = clag.errors(P1, ref_psd, ref_psd_err)
|
||||
|
||||
## plot ##
|
||||
#xscale('log'); ylim(-4,2)
|
||||
#errorbar(fqd, ref_psd, yerr=ref_psd_err, fmt='o', ms=10)
|
||||
|
||||
# Load second light curve
|
||||
lc2_time, lc2_strength, lc2_strength_err = np.loadtxt(args[1],skiprows=1).T
|
||||
P2 = clag.clag('psd10r', [lc2_time], [lc2_strength], [lc2_strength_err], dt, fqL)
|
||||
echo_psd = np.ones(nfq)
|
||||
echo_psd, echo_psd_err = clag.optimize(P2, echo_psd)
|
||||
echo_psd, echo_psd_err = clag.errors(P2, echo_psd, echo_psd_err)
|
||||
|
||||
|
||||
|
||||
### Now the cross spectrum ###
|
||||
### We also give it the calculated psd values as input ###
|
||||
Cx = clag.clag('cxd10r',
|
||||
[[lc1_time,lc2_time]],
|
||||
[[lc1_strength,lc2_strength]],
|
||||
[[lc1_strength_err,lc2_strength_err]],
|
||||
dt, fqL, ref_psd, echo_psd)
|
||||
|
||||
#Cx_vals = np.concatenate( (0.3*(ref_psd*echo_psd)**0.5, ref_psd*0+1.) )
|
||||
Cx_vals = np.concatenate( ((ref_psd+echo_psd)*0.5-0.3,ref_psd*0+0.1) )
|
||||
Cx_vals, Cx_err = clag.optimize(Cx, Cx_vals)
|
||||
|
||||
#?????? %autoreload
|
||||
Cx_vals, Cx_err = clag.errors(Cx,Cx_vals,Cx_err)
|
||||
|
||||
phi, phie = Cx_vals[nfq:], Cx_err[nfq:]
|
||||
lag, lage = phi/(2*np.pi*fqd), phie/(2*np.pi*fqd)
|
||||
cross_spectrm, cross_spectrm_err = Cx_vals[:nfq], Cx_err[:nfq]
|
||||
|
||||
np.savetxt("freq.out",fqL.reshape((-1,len(fqL))))
|
||||
np.savetxt("ref_psd.out",[ref_psd,ref_psd_err])
|
||||
np.savetxt("echo_psd.out",[echo_psd,echo_psd_err])
|
||||
np.savetxt("crsspctrm.out",[cross_spectrm,cross_spectrm_err])
|
||||
np.savetxt("timelag.out",[lag,lage])
|
||||
|
||||
|
171
htdocs/examples/analyze_lightcurves.sh
Executable file
171
htdocs/examples/analyze_lightcurves.sh
Executable file
@ -0,0 +1,171 @@
|
||||
#!/usr/bin/env bash
# FIX: the shebang must be the very first bytes of the file to take
# effect; it previously sat on line 2 below the coding cookie, so the
# script ran under the caller's shell instead of bash.
# -*- coding: utf-8 -*-

# Runs the psdlag analysis for every light curve in the data directory
# against the reference band, either locally or as PBS jobs on thor.

timestep="0.1"
ref_band='1367Å'
# Parameters for HST(𝛌=1367Å) from MC results
refpsd_params="6.533e-02 -9.694e-02 -1.175e+00 -1.525e+00 -2.166e+00 -2.492e+00 -3.258e+00 -9.328e+00"
# Parameters for HST(𝛌=1367Å) from covariance matrix estimates
#refpsd_params="7.376e-02 -1.976e-01 -1.182e+00 -1.521e+00 -2.144e+00 -2.503e+00 -3.580e+00 -1.233e+01"


error_type="0"
# error types:
# 0 for covariance matrix, 1 for likelihood function, 2 for monte carlo
case $error_type in
	"0") err_src="CM";;
	"1") err_src="LF";;
	"2") err_src="MC";;
esac

mkdir -p analyses

host=$1

if [[ "$(hostname)" == "thor.cs.wmich.edu" ]]
then
	host="thor"
fi

if [[ $host == "thor" ]]
then
	echo Setting up Thor environment.
	mkdir -p thor
	mkdir -p thor/arguments
	if [[ -e thor/submissions ]]; then echo "" >> thor/submissions; fi
	date >> thor/submissions
	echo "─────────────────────────────" >> thor/submissions
fi

lc_dir="data/STORM_III/lightcurves/Δt=${timestep}"
echo Using lightcurves in $lc_dir.
ref_curve="${lc_dir}/${ref_band}.lc"

for echo_curve in $lc_dir/*
do
	# Determine band and inputs for band
	echo_band=$(basename $echo_curve|sed 's@\(.*\)\.lc@\1@')
	echo -n $(date "+%R")\: Running psdlag for $echo_band against $ref_band.
	outputfile="analyses/${ref_band}_≺_${echo_band}_{Δt=${timestep};σ∊${err_src}}"
	# Per-band initial parameter vectors (PSD ref, PSD echo, cross terms).
	case $echo_band in
		"4775Å")
			initial_params="$refpsd_params -9.745e-01 -1.384e+00 -2.748e+00 -3.305e+00 -3.314e+00 -3.389e+00 -4.198e+00 -4.465e+00 -4.700e-01 -7.487e-01 -2.046e+00 -2.428e+00 -2.953e+00 -3.086e+00 -3.761e+00 -4.290e+00 9.862e-02 3.899e-01 8.650e-01 5.516e-01 2.228e-01 9.508e-01 -2.872e-01 9.059e-02"
			;;
		"1158Å")
			initial_params="$refpsd_params 2.504e-01 3.210e-02 -9.091e-01 -1.226e+00 -1.790e+00 -2.227e+00 -2.929e+00 -5.131e+00 1.618e-01 -8.299e-02 -1.045e+00 -1.370e+00 -1.952e+00 -2.338e+00 -3.075e+00 -3.742e+00 -3.301e-02 -7.353e-02 -7.112e-02 -5.938e-02 -4.394e-02 -1.756e-01 -2.617e-01 -1.425e-01"
			;;
		"1367Å")
			initial_params="$refpsd_params 6.534e-02 -9.689e-02 -1.175e+00 -1.525e+00 -2.167e+00 -2.492e+00 -3.258e+00 -1.115e+01 6.378e-02 -9.676e-02 -1.181e+00 -1.520e+00 -2.180e+00 -2.532e+00 -3.129e+00 -3.794e+00 -8.535e-05 -5.126e-04 -4.066e-04 1.080e-03 -2.682e-03 -5.462e-03 -2.209e-02 1.564e-01"
			;;
		"1479Å")
			initial_params="$refpsd_params 0.2e+00 -0.2e+00 -1.0e+00 -1.500e+00 -2.0e+00 -2.6e+00 -3.4e+00 -6e+00 9.340e-02 -8.842e-02 -1.173e+00 -1.512e+00 -2.113e+00 -2.547e+00 -3.178e+00 -7.000e+00 -6.035e-02 2.044e-02 1.258e-01 5.764e-02 2.580e-02 -4.737e-02 9.579e-02 6.411e-01"
			;;
		"1746Å")
			initial_params="$refpsd_params 3.934e-02 -1.638e-01 -1.339e+00 -1.655e+00 -2.232e+00 -2.793e+00 -4.025e+00 -1.072e+01 5.043e-02 -1.304e-01 -1.254e+00 -1.587e+00 -2.207e+00 -2.693e+00 -3.209e+00 -3.820e+00 3.040e-02 2.438e-02 1.450e-01 1.458e-01 1.772e-01 2.076e-01 -1.390e-01 3.929e-01"
			;;
		"7647Å")
			initial_params="$refpsd_params -1.510e+00 -1.872e+00 -3.441e+00 -3.722e+00 -4.794e+00 -4.336e+00 -4.644e+00 -5.061e+00 -7.402e-01 -9.967e-01 -2.393e+00 -2.723e+00 -3.701e+00 -3.742e+00 -4.317e+00 -4.979e+00 2.638e-01 7.095e-01 1.297e+00 1.035e+00 9.267e-01 8.982e-01 7.318e-01 9.092e-01"
			;;
		"8560Å")
			initial_params="$refpsd_params -1.497e+00 -1.786e+00 -3.555e+00 -4.203e+00 -3.826e+00 -4.096e+00 -4.304e+00 -1.109e+01 -7.542e-01 -9.532e-01 -2.397e+00 -3.214e+00 -3.008e+00 -3.489e+00 -4.353e+00 -4.061e+00 1.675e-01 7.660e-01 1.781e+00 1.971e+00 6.572e-01 8.520e-01 2.405e+00 -1.573e-01"
			;;
		"6175Å")
			initial_params="$refpsd_params -1.372e+00 -1.786e+00 -3.214e+00 -3.558e+00 -4.416e+00 -4.366e+00 -4.559e+00 -4.586e+00 -6.693e-01 -5.998e+00 -2.390e+00 -2.927e+00 -3.411e+00 -3.731e+00 -3.930e+00 -7.000e+00 2.905e-01 1.835e+00 8.760e-01 1.476e+00 -3.280e-01 1.439e+00 9.464e-02 1.532e+00"
			;;
		"6439Å")
			initial_params="$refpsd_params -1.287e+00 -1.580e+00 -2.879e+00 -3.569e+00 -3.661e+00 -4.471e+00 -3.998e+00 -5.001e+00 -6.583e-01 -6.900e+00 -2.060e+00 -2.727e+00 -2.954e+00 -3.654e+00 -4.406e+00 -5.203e+00 1.448e-01 -1.004e-01 9.754e-01 1.140e+00 9.284e-01 1.116e+00 2.092e+00 8.634e-01"
			;;
		"4392Å")
			initial_params="$refpsd_params -5.0e-01 -1.0e+00 -2.7e+00 -3.7e+00 -4.5e+00 -5.5e+00 -6.5e+00 -7.5e+00 -2.515e-01 -6.156e-01 -1.924e+00 -2.417e+00 -3.053e+00 -6.570e+00 -3.450e+00 -6.333e+00 7.408e-02 3.089e-01 5.057e-01 4.831e-01 5.779e-01 6.277e-01 4.823e-01 6.110e-01"
			;;
		"3465Å")
			initial_params="$refpsd_params -3.178e-01 -7.537e-01 -2.402e+00 -2.624e+00 -3.413e+00 -3.834e+00 -1.236e+01 -1.071e+01 -1.376e-01 -4.256e-01 -1.794e+00 -2.123e+00 -2.817e+00 -3.029e+00 -4.139e+00 -7.000e+00 7.840e-02 3.388e-01 6.616e-01 1.349e-01 9.909e-01 8.538e-01 1.397e+00 6.412e-01"
			;;
		"5468Å")
			initial_params="$refpsd_params -8.802e-01 -1.441e+00 -2.883e+00 -4.483e+00 -1.045e+01 -1.041e+01 -1.042e+01 -1.078e+01 -4.161e-01 -7.660e-01 -2.020e+00 -3.201e+00 -2.699e+00 -2.859e+00 -3.653e+00 -5.501e+00 1.900e-02 3.560e-01 1.007e+00 -1.270e+00 1.858e+00 9.302e-01 -1.502e+00 2.653e+00"
			;;
		"3471Å")
			initial_params="$refpsd_params -4.289e-01 -9.017e-01 -2.581e+00 -2.751e+00 -2.764e+00 -3.105e+00 -3.298e+00 -3.870e+00 -1.925e-01 -5.093e-01 -1.923e+00 -2.204e+00 -2.568e+00 -2.768e+00 -3.425e+00 -5.982e+00 1.646e-01 4.376e-01 7.202e-01 9.556e-01 4.856e-01 2.985e-01 -6.801e-01 -2.416e+00"
			;;
		"2246Å")
			initial_params="$refpsd_params -1.003e-01 -5.235e-01 -1.863e+00 -2.159e+00 -2.718e+00 -3.504e+00 -1.139e+01 -4.119e+00 -2.431e-02 -3.121e-01 -1.526e+00 -1.877e+00 -2.455e+00 -2.945e+00 -3.356e+00 -3.602e+00 -1.862e-04 1.380e-01 2.475e-01 3.102e-02 6.040e-01 -1.488e-01 1.171e+00 1.617e+00"
			;;
		"2600Å")
			initial_params="$refpsd_params -2.417e-01 -7.027e-01 -1.998e+00 -2.387e+00 -2.912e+00 -4.026e+00 -1.069e+01 -9.716e+00 -9.867e-02 -4.150e-01 -1.587e+00 -1.963e+00 -2.539e+00 -3.274e+00 -3.460e+00 -6.742e+00 8.0e-01 1.05 1.2 6.734e-02 2.959e-01 1.111e-01 -2.364e-01 2.463e-01"
			;;
		"1928Å")
			initial_params="$refpsd_params -1.648e-01 -4.974e-01 -1.686e+00 -1.897e+00 -2.600e+00 -2.801e+00 -3.814e+00 -9.888e+00 -6.101e-02 -3.020e-01 -1.433e+00 -1.756e+00 -2.485e+00 -2.799e+00 -3.317e+00 -7.000e+00 4.622e-02 1.473e-01 2.243e-01 1.606e-01 2.365e-01 1.484e-01 8.110e-01 9.689e-01"
			;;
		"5404Å")
			initial_params="$refpsd_params -1.044e+00 -1.457e+00 -2.848e+00 -2.957e+00 -4.232e+00 -4.052e+00 -4.515e+00 -5.250e+00 -5.026e-01 -7.878e-01 -2.118e+00 -2.276e+00 -3.421e+00 -3.378e+00 -4.224e+00 -4.930e+00 9.142e-02 4.814e-01 8.550e-01 3.929e-01 3.975e-01 5.311e-01 -1.203e-01 -2.905e+00"
			;;
		"9157Å")
			initial_params="$refpsd_params -1.852e+00 -2.156e+00 -3.798e+00 -4.146e+00 -4.577e+00 -4.338e+00 -4.672e+00 -5.230e+00 -9.339e-01 -1.131e+00 -2.536e+00 -3.031e+00 -4.060e+00 -3.801e+00 -4.139e+00 -5.151e+00 2.445e-01 8.131e-01 1.621e+00 1.460e+00 2.909e-01 6.991e-01 -5.831e-01 -2.921e-01"
			;;
		*)
			echo -n " Unknown band: using default parameters."
			initial_params="$refpsd_params -9.745e-01 -1.384e+00 -2.748e+00 -3.305e+00 -3.314e+00 -3.389e+00 -4.198e+00 -4.465e+00 -4.700e-01 -7.487e-01 -2.046e+00 -2.428e+00 -2.953e+00 -3.086e+00 -3.761e+00 -4.290e+00 9.862e-02 3.899e-01 8.650e-01 5.516e-01 2.228e-01 9.508e-01 -2.872e-01 9.059e-02"
			;;
	esac

	# Write temporary input file
	echo "2" > tmp.psdlagargs
	echo "$ref_curve 0" >> tmp.psdlagargs
	echo "$echo_curve 0" >> tmp.psdlagargs
	echo "8 0.0049999999 0.018619375 0.044733049 0.069336227 0.10747115 0.16658029 0.25819945 0.40020915 0.62032418" >> tmp.psdlagargs
	echo "0" >> tmp.psdlagargs
	echo "$initial_params" >> tmp.psdlagargs
	echo "0:0 0" >> tmp.psdlagargs
	echo $error_type >> tmp.psdlagargs
	echo "0" >> tmp.psdlagargs
	# Use 10x num vars for walkers
	echo -n "1000 50 400 mcmc_${echo_band}.dat" >> tmp.psdlagargs

	# Run psdlag with inputs
	if [[ $host == "thor" || "$(hostname)" == "thor.cs.wmich.edu" ]]
	then
		# PBS filenames must be ASCII: strip the UTF-8 decorations.
		echo_band_noUTF=$(echo $echo_band|
			#sed 's|𝛌||g'|
			#sed 's|(|_|g'|
			#sed 's|)|_|g'|
			sed 's|=||g'|
			sed 's|Å|A|g')
		outputfile_noUTF=$(echo $outputfile|
			#sed 's|𝛌||g'|
			#sed 's|(|_|g'|
			#sed 's|)|_|g'|
			sed 's|=||g'|
			sed 's|[{}]|_|g'|
			sed 's|;|_|'|
			sed 's|Å|A|g'|
			sed 's|_≺_|_|g'|
			sed 's|σ∊|err|')
		if [[ -e $outputfile_noUTF ]]
		then
			echo -n " Analysis already exists; skipping."
		else
			# Stage the argument file and generate a PBS script from the
			# template, then submit it.
			argsfile="thor/arguments/$echo_band_noUTF.args"
			submitscript="thor/${echo_band_noUTF}.pbs"
			cp tmp.psdlagargs $argsfile
			cat scripts/templates/thor_submit.pbs|
				sed "s@%NAME%@psdlag_${echo_band_noUTF}@g"|
				sed "s@%ARGSFILE%@${argsfile}@g"|
				sed "s@%ROOTDIR%@$(pwd)@g"|
				sed "s@%OUTPUTFILE%@${outputfile_noUTF}@g" > $submitscript
			cd thor
			qsub $(basename $submitscript) >> submissions
			cd ..
		fi
	else
		if [[ -e $outputfile ]]
		then
			echo -n " Analysis already exists; skipping."
		else
			#time bin/psdlag tmp.psdlagargs > $outputfile
			time python scripts/analyze_lightcurve.py "$ref_curve $echo_curve"
			scripts/extract_tables.pl

		fi
	fi
	echo ""
done
|
40
htdocs/examples/backup.sh
Executable file
40
htdocs/examples/backup.sh
Executable file
@ -0,0 +1,40 @@
|
||||
#!/usr/bin/env bash
# Pushes selected /piscina trees to a remote backup host over ssh via
# rsync, then logs the completion time.

port_number="46000"
user="caes"
host="omitted"


# uncompressed
cella_dir="/piscina/cella"
cella_exclude="jeni video"

# The nested echo/sed expands "jeni video" into "--exclude jeni --exclude video".
# NOTE(review): this relies on word splitting of the unquoted $( ) result;
# it works for space-separated single-word excludes only.
rsync -avz -e "ssh -p $port_number" $(echo --exclude $(echo "${cella_exclude}"|sed 's/\ / --exclude /g')) ${cella_dir} ${user}@${host}:/mnt/data/backup/pergamum/

biblio_dir="/piscina/biblio"
rsync -avz -e "ssh -p $port_number" ${biblio_dir} ${user}@${host}:/mnt/data/backup/pergamum/

chronica_dir="/piscina/chronica"
rsync -avz -e "ssh -p $port_number" ${chronica_dir} ${user}@${host}:/mnt/data/backup/pergamum/



#moved to uncompressed, left for reference

#compressed
#git_dirs="/git/*"
#git_exclude=""

#tar -cJf /piscina/backup_stage/git.tar.xz ${git_dirs}

#cd /usr/local/games/dev/
#tar --exclude="${gamesdev_exclude}" -cJf gamesdev.tar.xz ${gamesdev_dirs}
#mv -v gamesdev.tar.xz /usr/local/backup

# Send all staged files.
#rsync -avz -e "ssh -p $port_number" /piscina/backup_stage/* ${user}@${host}:/mnt/data/backup/pergamum/



# Report time
# FIX: modern $(...) command substitution instead of legacy backticks.
echo "Backup Script Completed At: $(date)" >> ~/sk/backup.sh.output
|
75
htdocs/examples/biaxial_stress_test.sh
Executable file
75
htdocs/examples/biaxial_stress_test.sh
Executable file
@ -0,0 +1,75 @@
|
||||
#!/bin/bash
|
||||
|
||||
# This program will statically apply biaxial strain on a sample or samples
|
||||
# by some delta-strain.
|
||||
|
||||
# Usage: ./biaxial_stress_test.sh <root dir> [source dir]
|
||||
|
||||
# For any samples found under [source dir] (or current directory if
|
||||
# [source dir] is not supplied), this program will search for LAMMPS input
|
||||
# called "minimized.input." This program assumes the structure to be a cell
|
||||
# intended for periodic boundary conditions.
|
||||
|
||||
# For each of these inputs, the program will call "stretch.x," a c++
|
||||
# program that will created a new LAMMPS input file with a box size larger
|
||||
# by delta-strain in the x and y dimensions, and transform the atomic
|
||||
# coordinates so that each atom's position relative to the simulation box
|
||||
# remains unchanged.
|
||||
|
||||
# The program will then run a minimization and repeat this process until
|
||||
# a maximum strain is reached.
|
||||
|
||||
script_dir=$( cd $(dirname $0) ; pwd -P |sed 's@^\(.*\)/scripts.*@\1/scripts@')
|
||||
bin_dir="$script_dir/../bin"
|
||||
root_dir=$1
|
||||
if [[ -z $2 ]]; then source_dir=$(pwd); else source_dir=$2; fi
|
||||
|
||||
source $script_dir/func/utility.src
|
||||
source $script_dir/func/input_parsing.src
|
||||
|
||||
input_files=$(find $source_dir -name 'minimized.input')
|
||||
num_files=$(echo $input_files|wc -l)
|
||||
|
||||
echo Found $num_files input files. Creating initial states.
|
||||
|
||||
origin_dir=$(pwd)
|
||||
|
||||
count="0"
|
||||
for input_file in $input_files; do
|
||||
# create new directory and set initial.input.
|
||||
file_name=$(basename $input_file)
|
||||
sample_dir=$(dirname $input_file)
|
||||
new_dir=$(echo $sample_dir|sed "s@^$source_dir@$root_dir/@"|sed 's@/*$@@')
|
||||
mkdir -p $new_dir
|
||||
cp $input_file $new_dir/initial.input
|
||||
|
||||
cd $new_dir
|
||||
|
||||
simulation_box_lengths initial.input > box_lengths.tmp
|
||||
read Lx Ly Lz < box_lengths.tmp
|
||||
|
||||
# Setup strain lengths because cluster doesn't have bc.
|
||||
strain_sequence=$(seq 5 5 500|
|
||||
sed 's/^\([0-9]\)$/00\1/'|
|
||||
sed 's/^\([0-9]\)\([0-9]\)$/0\1\2/')
|
||||
|
||||
> strain_lengths.tmp
|
||||
|
||||
pwd
|
||||
|
||||
for strain in $strain_sequence; do
|
||||
new_Lx=$(bc <<< "scale=10; $Lx * (1.$strain)")
|
||||
new_Ly=$(bc <<< "scale=10; $Ly * (1.$strain)")
|
||||
echo "STRAIN=$strain $new_Lx $new_Ly" >> strain_lengths.tmp
|
||||
done
|
||||
|
||||
|
||||
#sbatch $script_dir/cluster/strain_loop.sub .
|
||||
$script_dir/run/apply_strain.sh .
|
||||
(( count++ ))
|
||||
|
||||
cd $origin_dir
|
||||
done
|
||||
|
||||
|
||||
echo $count threads submitted.
|
13886
htdocs/examples/clag_analysis-origbins-3471A.html
Normal file
13886
htdocs/examples/clag_analysis-origbins-3471A.html
Normal file
File diff suppressed because it is too large
Load Diff
132
htdocs/examples/create_fort_files.cpp
Normal file
132
htdocs/examples/create_fort_files.cpp
Normal file
@ -0,0 +1,132 @@
|
||||
#include "agn.hpp"
|
||||
#include "spectral_lines.hpp"
|
||||
|
||||
// When this number of iterations is achieved, the program flags it
|
||||
// as a possible failure.
|
||||
double MAX_ITERATIONS = 15;
|
||||
|
||||
|
||||
int main(int argc, char const *argv[]) {
|
||||
std::ifstream cloudy_result_file;
|
||||
cloudy_result_file.open(argv[1]);
|
||||
|
||||
std::cout
|
||||
<< "Reading cloudy grid from "
|
||||
<< argv[1]
|
||||
<< ".\n";
|
||||
agn::cloudy_grid grid = agn::read_cloudy_grid(cloudy_result_file);
|
||||
|
||||
std::cout
|
||||
<< "Reading line list from "
|
||||
<< argv[2]
|
||||
<< ".\n";
|
||||
std::ifstream line_list_file;
|
||||
line_list_file.open(argv[2]);
|
||||
agn::line_list lines_to_print = agn::read_line_list(line_list_file);
|
||||
std::cout
|
||||
<< "Compiling table2ds for "
|
||||
<< lines_to_print.size()
|
||||
<< " emission lines and continuum bins.\n";
|
||||
std::list<agn::eqwidth_table> tables =
|
||||
agn::compile_eqwidth_tables(grid,lines_to_print,1215.00);
|
||||
|
||||
|
||||
// Remove any tables that are zero.
|
||||
std::list<agn::eqwidth_table>::iterator table_it = tables.begin();
|
||||
std::ofstream zeroreport;
|
||||
zeroreport.open("zero_report");
|
||||
zeroreport << "These headers were pulled from tables"
|
||||
<< " that returned minimum values."
|
||||
<< std::endl;
|
||||
int num_zeroes=0;
|
||||
while(table_it != tables.end()) {
|
||||
if(agn::is_zero(*table_it)) {
|
||||
std::string linetype = table_it->header[0].substr(0,4);
|
||||
//if (linetype == "nFnu" ||
|
||||
// linetype == "nInu" ||
|
||||
// linetype == "InwT" ||
|
||||
// linetype == "InwC") {
|
||||
// table_it++;
|
||||
// continue;
|
||||
//}
|
||||
|
||||
zeroreport << " "
|
||||
<< table_it->header[0]
|
||||
<< std::endl;
|
||||
num_zeroes++;
|
||||
//table_it = tables.erase(table_it);
|
||||
table_it++; // needed since the erase() would have incremented
|
||||
continue;
|
||||
}
|
||||
|
||||
table_it++;
|
||||
}
|
||||
std::cout << "Removed (currently disabled) "
|
||||
<< num_zeroes
|
||||
<< " tables from the list because"
|
||||
<< " they had zero value."
|
||||
<< std::endl;
|
||||
|
||||
// Write the tables to the fortfile stack.
|
||||
std::cout
|
||||
<< "Printing "
|
||||
<< tables.size()
|
||||
<< " tables to fortfiles.\n";
|
||||
table_it = tables.begin();
|
||||
int fortfilenum=11;
|
||||
while(table_it != tables.end()) {
|
||||
std::ofstream outfile;
|
||||
std::stringstream filename;
|
||||
filename << "fort.";
|
||||
filename << fortfilenum;
|
||||
outfile.open(filename.str().c_str());
|
||||
outfile << *table_it;
|
||||
outfile.close();
|
||||
table_it++;
|
||||
fortfilenum++;
|
||||
}
|
||||
|
||||
int num_unconverged = 0;
|
||||
agn::cloudy_grid::iterator result_it = grid.begin();
|
||||
std::ofstream cautionreportfile;
|
||||
cautionreportfile.open("cautions");
|
||||
cautionreportfile
|
||||
<< "The following solutions probably did not converge."
|
||||
<< std::endl << std::endl;
|
||||
while(result_it != grid.end()) {
|
||||
if (result_it->second.iterations >= MAX_ITERATIONS) {
|
||||
num_unconverged++;
|
||||
cautionreportfile
|
||||
<< "hden = "
|
||||
<< std::fixed
|
||||
<< std::setprecision(3)
|
||||
<< result_it->second.hden
|
||||
<< ", phi = "
|
||||
<< result_it->second.phi
|
||||
<< std::endl
|
||||
<< "───────────────────────────"
|
||||
<< std::endl;
|
||||
std::list<std::string>::iterator caution_it =
|
||||
result_it->second.cautions.begin();
|
||||
while(caution_it != result_it->second.cautions.end()) {
|
||||
cautionreportfile
|
||||
<< *caution_it
|
||||
<< std::endl;
|
||||
caution_it++;
|
||||
}
|
||||
cautionreportfile << std::endl << std::endl;
|
||||
}
|
||||
result_it++;
|
||||
}
|
||||
|
||||
std::cout
|
||||
<< "Saved cautions for "
|
||||
<< num_unconverged
|
||||
<< " unconverged solutions."
|
||||
<< std::endl;
|
||||
cautionreportfile.close();
|
||||
|
||||
std::cout << "Done.\n";
|
||||
return 0;
|
||||
}
|
||||
|
705
htdocs/examples/curriculum_vitae.tex
Normal file
705
htdocs/examples/curriculum_vitae.tex
Normal file
@ -0,0 +1,705 @@
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% Wilson Resume/CV
|
||||
% XeLaTeX Template
|
||||
% Version 1.0 (22/1/2015)
|
||||
%
|
||||
% This template has been downloaded from:
|
||||
% http://www.LaTeXTemplates.com
|
||||
%
|
||||
% Original author:
|
||||
% Howard Wilson (https://github.com/watsonbox/cv_template_2004) with
|
||||
% extensive modifications by Vel (vel@latextemplates.com)
|
||||
%
|
||||
% License:
|
||||
% CC BY-NC-SA 3.0 (http://creativecommons.org/licenses/by-nc-sa/3.0/)
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%----------------------------------------------------------------------------------------
|
||||
% PACKAGES AND OTHER DOCUMENT CONFIGURATIONS
|
||||
%----------------------------------------------------------------------------------------
|
||||
|
||||
\documentclass[10pt]{article} % Default font size
|
||||
|
||||
\input{structure.tex} % Include the file specifying document layout
|
||||
|
||||
%----------------------------------------------------------------------------------------
|
||||
|
||||
\begin{document}
|
||||
|
||||
%----------------------------------------------------------------------------------------
|
||||
% NAME AND CONTACT INFORMATION
|
||||
%----------------------------------------------------------------------------------------
|
||||
|
||||
\title{Otho Ulrich -- Curriculum Vitae} % Print the main header
|
||||
|
||||
%------------------------------------------------
|
||||
|
||||
\parbox{0.5\textwidth}{ % First block
|
||||
\begin{tabbing} % Enables tabbing
|
||||
\hspace{3cm} \= \hspace{4cm} \= \kill % Spacing within the block
|
||||
{\bf Address} \> 130 Penmoken Park\\ % Address line 1
|
||||
\> Lexington, KY 40503 % Address line 2
|
||||
%{\bf Nationality} \> British % Nationality
|
||||
\end{tabbing}}
|
||||
\hfill % Horizontal space between the two blocks
|
||||
\parbox{0.5\textwidth}{ % Second block
|
||||
\begin{tabbing} % Enables tabbing
|
||||
\hspace{3cm} \= \hspace{4cm} \= \kill % Spacing within the block
|
||||
%{\bf Home Phone} \> +0 (000) 111 1111 \\ % Home phone
|
||||
{\bf Mobile Phone} \> (734) 272 3363 \\ % Mobile phone
|
||||
{\bf Email} \> \href{mailto:othoulrich@uky.edu}{othoulrich@gmail.com} % Email address
|
||||
\end{tabbing}}
|
||||
|
||||
%------------------------------------------------------------------------------
|
||||
% Software Skills
|
||||
%------------------------------------------------------------------------------
|
||||
|
||||
\section{Software Engineering Skills}
|
||||
|
||||
\skillgroup{Programming Languages and Protocols}
|
||||
{
|
||||
\textit{C++}\quad
|
||||
\textit{Java}\quad
|
||||
\textit{Bash}\quad
|
||||
\textit{Python}\quad
|
||||
\textit{Javascript}\quad
|
||||
\textit{SQL}\\
|
||||
\textit{C\#}\quad\hspace*{4pt}
|
||||
\textit{Lua}\quad\hspace*{4pt}
|
||||
\textit{R}\quad\hspace*{6pt}
|
||||
\textit{Node.js}\quad\hspace*{2pt}
|
||||
\textit{Perl}\quad
|
||||
\textit{Regex}\quad
|
||||
\textit{Git}
|
||||
}
|
||||
\skillgroup{Environments}
|
||||
{
|
||||
\textit{GNU/Linux}\quad
|
||||
\textit{BSD}\quad\hspace*{10pt}
|
||||
\textit{Windows}\\
|
||||
\textit{XenServer}\quad
|
||||
\textit{vSphere}\quad
|
||||
\textit{VirtualBox}
|
||||
}
|
||||
|
||||
\skillgroup{Physics Codes}
|
||||
{
|
||||
\textit{Cloudy} - Photoionization simulation code developed by my research team at the University of Kentucky, under Gary Ferland et al. Coded in C++, with scripting in Perl, Python, and BASH.\\
|
||||
\textit{LAMMPS} - Open source molecular dynamics simulation code. Coded in C++, with scripting in BASH and Python.\\
|
||||
\textit{psdlag} - Fourier analysis code using a max-likelihood optimization to overcome gappy data. Coded in C++ with a Jupyter-Notebook-friendly Cython interface.\\
|
||||
}
|
||||
|
||||
\skillgroup{High-Performance Computing}
|
||||
{
|
||||
\textit{MPI} - Protocol for handling messages between parallel computing tasks.\\
|
||||
\textit{ZFS} - File system and logical volume manager with emphasis on scalability and data integrity.\\
|
||||
\textit{Hadoop} - Distributed file system and data analysis platform.\\
|
||||
\textit{Jupyter} - Computational environment for managing, analyzing, and presenting data.\\
|
||||
\textit{Torque} - Job scheduler that understands Nvidia CUDA protocols.\\
|
||||
\textit{Slurm} - Open-source job scheduler for Linux.
|
||||
}
|
||||
|
||||
|
||||
\section{Technical and Practical Skills}
|
||||
|
||||
\begin{itemize-noindent}
|
||||
|
||||
\item{\href{http://159.203.46.10/examples/}{Significant programming experience}, including strength in many languages and libraries, with emphases on modeling and on \href{http://159.203.46.10/examples/biaxial_stress_test.sh}{scripting} to \href{http://159.203.46.10/examples/stage3.sh}{analyze} and justify \href{http://159.203.46.10/examples/create_fort_files.cpp}{large and inconsistent datasets} and \href{http://159.203.46.10/examples/backup.sh}{automate system tasks.}}
|
||||
|
||||
\item{Training and practice in statistical learning, i.e. ``Machine Learning,'' including a variety of learning models.}
|
||||
|
||||
\item{Experience handling large, sparse datasets, i.e. ``Big Data,'' including the use of cluster computing for \href{http://159.203.46.10/examples/run_analysis.sh}{analysis.}}
|
||||
|
||||
\item{Experience with \href{http://159.203.46.10/examples/analyze_lightcurves.sh}{operating and programming for} high-performance cluster computing, i.e. ``Cloud Computing,'' using a variety of task scheduling platforms.}
|
||||
|
||||
\item{Practice in useful mathematics, including real and complex analysis, \href{http://159.203.46.10/science/simple_fourier_transform.pdf}{Fourier analysis}, and potential theory.}
|
||||
|
||||
\item{Strong sense of component networks, system control flows, and system abstractions.}
|
||||
|
||||
\item{Familiarity with most computer systems, esp. \href{http://159.203.46.10/img/adamonet_bsd_install.jpg}{GNU/Linux and BSD}, including a strong shell \href{http://159.203.46.10/examples/plot.sh}{scripting ability.}}
|
||||
|
||||
\item{Advanced knowledge of \href{http://159.203.46.10/science/spinz_commute.pdf}{physics} and inorganic chemistry.}
|
||||
|
||||
\item{Practical and theoretical experience with electronics.}
|
||||
|
||||
\item{Working knowledge of physics codes and \href{http://159.203.46.10/examples/tophat_fft.pl}{signal analysis codes}, especially \textit{Cloudy}, \textit{LAMMPS}, and \textit{psdlag}.}
|
||||
|
||||
\item{Strong critical thinking and writing skills, with experience using document-preparation paradigms including \href{http://159.203.46.10/examples/curriculum_vitae.tex}{LaTeX}, Libre Office, Google Docs, Microsoft Office, and MediaWiki, with familiarity of \href{http://159.203.46.10/stuff/utf8report.txt}{Unicode} and markup.}
|
||||
|
||||
\item{Developed public speaking presence with practice including 6 years of training, classroom, and lecture hall experience and 10 invited talks.}
|
||||
|
||||
\end{itemize-noindent}
|
||||
|
||||
%----------------------------------------------------------------------------------------
|
||||
% EDUCATION SECTION
|
||||
%----------------------------------------------------------------------------------------
|
||||
|
||||
\section{Education}
|
||||
|
||||
\tabbedblock{
|
||||
\bf{2013-2017} \> B.S., major in Physics, minor in Mathematics - \href{http://http://www.wmich.edu/physics}{Western Michigan University}
|
||||
\\
|
||||
\>\ \ \ \ \ \ \textit{Advanced Scientific Writing}\\
|
||||
\>\ \ \ \ \ \ \textit{Physics Research Procedures}\\
|
||||
\>\ \ \ \ \ \ \textit{Statistical \& Classical Thermodynamics}\\
|
||||
\>\ \ \ \ \ \ \textit{Machine Learning}\\
|
||||
\>\ \ \ \ \ \ \textit{Lagrange Multipliers}\\
|
||||
\>\ \ \ \ \ \ \textit{Laplace Transformations}\\
|
||||
\>\ \ \ \ \ \ \textit{Fourier Analysis}\\
|
||||
\>\ \ \ \ \ \ \textit{Electric and Magnetic Properties of Matter}\\
|
||||
\>\ \ \ \ \ \ \textit{Fundamentals of Electromagnetic Theory: Maxwell's Equations}\\
|
||||
\>\ \ \ \ \ \ \textit{Atomic Physics}\\
|
||||
\>\ \ \ \ \ \ \textit{Quantum Wave Theory \& Matrix Mechanics}\\
|
||||
\>\ \ \ \ \ \ \textit{Lagranian \& Hamiltonian Classical Mechanics}\\
|
||||
\>\ \ \ \ \ \ \textit{Newtonian Mechanics}\\
|
||||
\>\ \ \ \ \ \ \textit{Transistor Fundamentals, Networks, and Digital Logic}\\
|
||||
\>\ \ \ \ \ \ \textit{Alternating and Direct Current RCL Networks}\\
|
||||
\>\ \ \ \ \ \ \textit{Fundamentals of Electronics}\\
|
||||
\>\ \ \ \ \ \ \textit{Intermediate Scientific Writing}\\
|
||||
\>\ \ \ \ \ \ \textit{Introductory Potential Theory: LaPlace's and Poisson's Equations}\\
|
||||
\>\ \ \ \ \ \ \textit{Calculus of Complex Variables}\\
|
||||
\>\ \ \ \ \ \ \textit{Vector-based Differential Geometry}\\
|
||||
\>\ \ \ \ \ \ \textit{Stellar Astrophysics}\\
|
||||
\>\ \ \ \ \ \ \textit{Introductory Statistical Mechanics}\\
|
||||
\>\ \ \ \ \ \ \textit{Keplerian Mechanics}\\
|
||||
\>\ \ \ \ \ \ \textit{Optics}\\
|
||||
\>\ \ \ \ \ \ \textit{Classical Wave Theory}\\
|
||||
\>\ \ \ \ \ \ \textit{Basic Scientific Inquiry}\\
|
||||
\>\ \ \ \ \ \ \textit{Special Relativity}\\
|
||||
\>\ \ \ \ \ \ \textit{Introductory Quantum Wave Theory}\\
|
||||
\>\ \ \ \ \ \ \textit{Philosophy of Science}\\
|
||||
\>\ \ \ \ \ \ \textit{Introductory Scientific Writing}\\
|
||||
\>\ \ \ \ \ \ \textit{Ordinary Differential Equations and Systems}\\
|
||||
\>\ \ \ \ \ \ \textit{Linear Algebra}\\
|
||||
\>\ \ \ \ \ \ \textit{Statistics}\\
|
||||
}
|
||||
\tabbedblock{
|
||||
\bf{2011-2013} \> Computer Science and Engineering - \href{http://http://www.wccnet.edu/}{Washtenaw Community College}
|
||||
\\
|
||||
\>\ \ \ \ \ \ \textit{Introductory Electromagnetism}\\
|
||||
\>\ \ \ \ \ \ \textit{Scalar and Vector Fields}\\
|
||||
\>\ \ \ \ \ \ \textit{Introductory Mechanics \& Thermodynamics}\\
|
||||
\>\ \ \ \ \ \ \textit{Shell Programming}\\
|
||||
\>\ \ \ \ \ \ \textit{Algorithms}\\
|
||||
\>\ \ \ \ \ \ \textit{Data and Object Structures}\\
|
||||
\>\ \ \ \ \ \ \textit{C++ and Java Programming}\\
|
||||
\>\ \ \ \ \ \ \textit{Differential \& Integral Calculus}\\
|
||||
}
|
||||
\tabbedblock{
|
||||
\bf{2005} \> Classical and Jazz Music Performance - \href{http://http://www.wccnet.edu/}{Lansing Community College}
|
||||
\\
|
||||
\>\ \ \ \ \ \ \textit{Trombone Chamber \& Jazz Performance}\\
|
||||
\>\ \ \ \ \ \ \textit{Classical Music Theory}\\
|
||||
\>\ \ \ \ \ \ \textit{Western Music History}\\
|
||||
\>\ \ \ \ \ \ \textit{Vocal Jazz Performance}\\
|
||||
\>\ \ \ \ \ \ \textit{Educational Piano}\\
|
||||
}
|
||||
|
||||
%------------------------------------------------------------------------------
|
||||
% Clusters
|
||||
%------------------------------------------------------------------------------
|
||||
|
||||
\section{Associated Computing Clusters}
|
||||
|
||||
|
||||
\begin{itemize-noindent}
|
||||
|
||||
\item{\textit{Cloud 9} - The core system used by the Cloudy group (nublado.org) at the University of Kentucky. This is used to run programs for our spectroscopy in astrophysics course as well as to test new models as we develop the code and our projects for which the code is used to make scientific predictions.}
|
||||
|
||||
\item{\textit{Thor} - Provided by Western Michigan University's High Performance Computational Science Lab. Uses the Torque job scheduler and includes several Nvidia CUDA nodes. In some way, I have used this cluster for each undergraduate physics project listed in this document.}
|
||||
|
||||
\item{\textit{MSL Cluster} - Maintained by the Materials Simulation Lab at the University of South Florida. Uses SLURM to schedule jobs. Much of my work with LAMMPS was done on this cluster.}
|
||||
|
||||
\item{\textit{Circe} - Provided by the University of South Florida's Research Computing group. Standard shared-computing resource. I used this cluster to learn LAMMPS while the MSL Cluster was built.}
|
||||
|
||||
\item{\textit{WCCnet Hadoop Cluster} - Created as part of the Ann Arbor Transit Authority Hadoop traffic analytics project. Data is hosted using the Hadoop distributed file system and map reduce handles scheduling and passing jobs. I created and maintained this cluster myself.}
|
||||
|
||||
\item{\textit{AdamoNet} - My personal computing cluster. A network of physical and virtual servers for various tasks, including RAID storage of personal and scientific archives, off-site backups, game servers, communication platforms, and personal research. Virtual machines have been served using the XenServer and VirtualBox hypervisors.}
|
||||
|
||||
\end{itemize-noindent}
|
||||
|
||||
|
||||
%------------------------------------------------------------------------------
|
||||
% Orgs
|
||||
%------------------------------------------------------------------------------
|
||||
|
||||
\section{Professional Associations}
|
||||
|
||||
\begin{itemize-noindent}
|
||||
|
||||
\item{American Physical Society}
|
||||
|
||||
\item{American Astronomical Society}
|
||||
|
||||
\item{Society of Physics Students}
|
||||
|
||||
\item{Michigan Academy of Science, Arts, and Letters}
|
||||
|
||||
\end{itemize-noindent}
|
||||
|
||||
%------------------------------------------------------------------------------
|
||||
% Appointments
|
||||
%------------------------------------------------------------------------------
|
||||
|
||||
\section{Appointments}
|
||||
\begin{itemize-noindent}
|
||||
|
||||
\item{Communications Officer, Free Software Society, University of Kentucky.}
|
||||
|
||||
\item{Graduate Representative, Inclusive Pedagogies Graduate Learning Community, College of Arts \& Sciences, University of Kentucky.}
|
||||
|
||||
\item{Jurist, Academic Integrity Committee, Western Michigan University.}
|
||||
|
||||
\item{Student Representative, Gateways to Completion Steering Committee, Western Michigan University.}
|
||||
|
||||
\item{Librarian, Physics Club, Western Michigan University.}
|
||||
|
||||
\end{itemize-noindent}
|
||||
|
||||
%------------------------------------------------------------------------------
|
||||
% Invited Talks
|
||||
%------------------------------------------------------------------------------
|
||||
\section{Invited Talks}
|
||||
|
||||
\begin{itemize-noindent}
|
||||
|
||||
\item{\textbf{O. Ulrich}, ``Quantifying the Diffuse Continuum Contribution of BLR Clouds to AGN Continuum Inter-band Delays,'' Astrophysics Seminar Series, Dept. of Physics \& Astronomy, University of Kentucky, August 30, 2018.}
|
||||
|
||||
\item{\textbf{O. Ulrich}, ``Emission Line Equivalent Widths for AGN Modeling,'' Walking the Line, Arizona State University, Spring 2018.}
|
||||
|
||||
\item{M. Goad, D. Lawther, K. Korista, \textbf{O. Ulrich}, M. Vestergaard, ``Quantifying the diffuse continuum contribution from BLR gas: a modeling approach,'' Atlanta meeting of the Space Telescope Optical Reveberation Mapping (STORM) group, Fall 2017.}
|
||||
|
||||
\item{Anna Stephens, \textbf{Otho Ulrich}, Mariia Kravtsova, ``Is this a Pulsar?'' Spring Senior Seminar Series, Department of Computer Science, Western Michigan University, 2017.}
|
||||
|
||||
\item{D. Schuster, \textbf{O. Ulrich}, ``An Objectives-Mastery Credit Accumulation Course System,'' March meeting of the Michigan Academy of Science, Arts, and Letters, 2017.}
|
||||
|
||||
\item{\textbf{O. Ulrich}, ``Experiences in Research of an Undergraduate Physics Major,'' Colloquium Series, Department of Physics, Western Michigan University, Feb. 2017.}
|
||||
|
||||
\item{\textbf{O. Ulrich}, ``Undergraduate Research,'' October meeting of the Physics Club at Western Michigan University, 2016.}
|
||||
|
||||
\item{\textbf{O. Ulrich}, D. Schuster, ``Objectives-Mastery Modular Course Operating System,'' March meeting of the Michigan Academy of Science, Arts, and Letters, 2016.}
|
||||
|
||||
\item{\textbf{O. Ulrich}, J. Gonzalez, K.N. Cong, I. Oleynik, \href{http://meetings.aps.org/Meeting/MAR15/Session/D4.9}{``Atomic Structure of Grain Boundaries in Graphene,''} March meeting of the American Physical Society, 2015.}
|
||||
|
||||
\end{itemize-noindent}
|
||||
|
||||
|
||||
%------------------------------------------------------------------------------
|
||||
% Publications
|
||||
%------------------------------------------------------------------------------
|
||||
\section{Articles}
|
||||
|
||||
\begin{itemize-noindent}
|
||||
|
||||
\item{J. Shelley, \textbf{O. Ulrich}, ``Energy Decay Network (EdeN),'' 2019, Drafted, N.P.}
|
||||
|
||||
\item{D. Lawther, M.R. Goad, K.T. Korista, \textbf{O. Ulrich}, M. Vestergaard, \href{https://academic.oup.com/mnras/article-abstract/481/1/533/5076066}{``Quantifying the Diffuse Continuum Contribution of BLR Clouds to AGN Continuum Inter-band Delays,''} 2018, MNRAS, 481, 533-554}
|
||||
|
||||
\item{Anna Stephens, \textbf{Otho Ulrich}, Mariia Kravtsova, ``Machine Learning to Classify Pulsar Candidates,'' 2017, Western Michigan University, N.P.}
|
||||
|
||||
\item{David Schuster and \textbf{Otho Ulrich}, Abstract: ``An Objectives-Mastery Credit Accumulation Course System,'' 2017, Michigan Academician, XLV 1, 67.}
|
||||
|
||||
\item{Allan Schnorr-Müller, R.I. Davies, K.T. Korista, L. Burtscher, D. Rosario, T. Storchi-Bergmann, A. Contursi, R. Genzel, J. Graciá-Carpio, E.K.S. Hicks, A. Janssen, M. Koss, M.-Y. Lin, D. Lutz, W. Maciejewski, F. Müller-Sánchez, G. Orban de Xivry, R. Riffel, R.A. Riffel, M. Schartmann, A. Sternberg, E. Sturm, L. Tacconi, S. Veilleux, \textbf{O. A. Ulrich}, \href{https://arxiv.org/abs/1607.07308}{``Constraints on the Broad Line Region Properties and Extinction in Local Seyferts,''}2016, MNRAS, 462, 3570-3590.}
|
||||
|
||||
\item{\textbf{Otho A. Ulrich}, Edward M. Cackett, \href{https://clas.wayne.edu/physics/reu_reports/ulrich.pdf}{``Optical/UV Band Reverberation Mapping of NGC 5548 with Frequency-Resolved Techniques,''} 2016, Wayne State University College of Arts and Sciences.}
|
||||
|
||||
\item{David Schuster and \textbf{Otho Ulrich}, Abstract: ``A Very Different Way of Running Your Course and Assessments: An Objectives-Mastery Modular System,'' 2016, Michigan Academician, XLIV 2, 126-127.}
|
||||
|
||||
\end{itemize-noindent}
|
||||
|
||||
%------------------------------------------------
|
||||
\section{Peer Review}
|
||||
|
||||
\begin{itemize-noindent}
|
||||
|
||||
\item{\textit{The Particle Accelerator Simulation Code PyORBIT}, Shishlo, Andrei, et al., reviewed as submission for the Large Scale Computational Physics Workshop at the International Conference on Computational Science, 2015.}
|
||||
|
||||
\end{itemize-noindent}
|
||||
|
||||
%------------------------------------------------------------------------------
|
||||
% Awards
|
||||
%------------------------------------------------------------------------------
|
||||
\section{Awards}
|
||||
|
||||
\begin{itemize-noindent}
|
||||
|
||||
\item{GAANN Fellowship, U.S. Department of Education \& Department of Physics and Astronomy, University of Kentucky.}
|
||||
|
||||
\item{Research Fellowship, Department of Physics and Astronomy, University of Kentucky.}
|
||||
|
||||
\item{Paul Rood Physics Scholarship, Department of Physics, Western Michigan University.}
|
||||
|
||||
\item{Thomas Dickinson Award, Department of Physics, Western Michigan University.}
|
||||
|
||||
\item{Dean's List, College of Arts and Science, Western Michigan University.}
|
||||
|
||||
\end{itemize-noindent}
|
||||
|
||||
|
||||
|
||||
%------------------------------------------------------------------------------
|
||||
% Research
|
||||
%------------------------------------------------------------------------------
|
||||
|
||||
\section{Technical Experience}
|
||||
|
||||
%------------------------------------------------
|
||||
|
||||
\job
|
||||
{Present -}{2014}
|
||||
{Independent Research}
|
||||
{}
|
||||
{Researcher, Developer, Physicist}
|
||||
{
|
||||
\begin{itemize-noindent}
|
||||
|
||||
\item{Developing a thermodynamics-based approach to statistical learning and artificial intelligence based on energy-decay networks.}
|
||||
|
||||
\item{Developing test-case learning scenarios for energy-decay network to practice its capabilities of adaptation and growth, including spatial orientation and pathing tasks, low-energy communications waveform solutions, and a variety of other paradigms.}
|
||||
|
||||
\item{Developed, using R and C++, a statistical learning algorithm to classify pulsar candidates which produced a 98\% accuracy rate across more than 15,000 samples. This task must currently be done by humans, so the potential to aid astrophysics research output is significant. (2017)}
|
||||
|
||||
\item{Developed TAAP, a time-dependent, generalized physics simulation engine, which included particle interactions dictated by gravitational and electromagnetic forces. (2014)}
|
||||
|
||||
\end{itemize-noindent}
|
||||
}
|
||||
|
||||
%------------------------------------------------
|
||||
|
||||
\job
|
||||
{Present -}{2017}
|
||||
{Mjolnir Software}
|
||||
{}
|
||||
{Science Officer, Developer}
|
||||
{
|
||||
\begin{itemize-noindent}
|
||||
|
||||
\item{Research and present physical models intended as possible game engine elements.}
|
||||
|
||||
\item{Review all scientific information before it is applied in development.}
|
||||
|
||||
\item{Design and develop game engine elements for Galaxy In Flames, the company's major game product, primarily using C\#.}
|
||||
|
||||
\item{Write technical documentation for engine elements, scientific elements, and development procedures.}
|
||||
|
||||
\end{itemize-noindent}
|
||||
}
|
||||
|
||||
|
||||
%------------------------------------------------
|
||||
|
||||
\job
|
||||
{Present -}{2018}
|
||||
{Department of Physics and Astronomy, University of Kentucky}
|
||||
{}
|
||||
{Research Assistant to Gary Ferland}
|
||||
{
|
||||
\begin{itemize-noindent}
|
||||
|
||||
\item{Extend atomic physics models in the Cloudy photoionization software.}
|
||||
|
||||
\item{Develop extended quantum mechanical models by correlating sparse datasets from NIST and recently-published works.}
|
||||
|
||||
\item{Automate functions using BASH, Perl, and Python scripting to cleanly resolve data collisions and inconsistencies between large sets of atomic data across 16 atomic species.}
|
||||
|
||||
\item{Use Cloudy to simulate diagnostic emission measurements relevant to observational astronomers as a scientific check on new theoretical models.}
|
||||
|
||||
\end{itemize-noindent}
|
||||
}
|
||||
|
||||
%------------------------------------------------
|
||||
|
||||
\job
|
||||
{2018 -}{2016}
|
||||
{Department of Physics and Astronomy, Wayne State University}
|
||||
{}
|
||||
{Computational Researcher under Professor Ed Cackett}
|
||||
{
|
||||
\begin{itemize-noindent}
|
||||
|
||||
\item{Using frequency-domain techniques developed in C++, studied the reverberation of NGC 5548 apparent in the optical and UV lightcurves published in \href{https://arxiv.org/abs/1510.05648}{STORM III, Fausnaugh, et al.}.}
|
||||
|
||||
\item{Characterized the reverberation by the power spectral densities of the curves and their frequency-dependent time lags.}
|
||||
|
||||
\item{Attempted to solve the problem of gappy data in the light curves by preparing the data using BASH and Perl scripting then using a novel frequency-domain approach developed by Dr. Abdu Zoghbi, University of Michigan, provided as the "psdlag" code.}
|
||||
|
||||
\item{Recovered the transfer functions for each set of wavelengths by optimizing test functions in the frequency domain, then compared against current accretion disk model predictions.}
|
||||
|
||||
\end{itemize-noindent}
|
||||
}
|
||||
|
||||
%------------------------------------------------
|
||||
|
||||
\job
|
||||
{2017 -}{2015}
|
||||
{Department of Physics, Western Michigan University}
|
||||
{}
|
||||
{Research Assistant for Professor Kirk Korista}
|
||||
{
|
||||
\begin{itemize-noindent}
|
||||
|
||||
\item{Used the photoionization simulation software Cloudy to compute predictions of emission line strengths across a parameter space including incident Hydrogen-ionizing flux, Hydrogen number density, and column density.}
|
||||
|
||||
\item{Developed using C++ a continuum spectrum model for the electromagnetic source radiation from NGC 5548, adapting it as knowledge of active galactic nuclei grew.}
|
||||
|
||||
\item{Work credited on the paper \href{https://arxiv.org/abs/1607.07308}{Constraints on the Broad Line Region Properties and Extinction in Local Seyferts.}, wherein predicted hydrogen emission line strengths are used to aid in the comparison of the broad H-I line ratios, one of the features used to compute extinction, across the sample of AGN.}
|
||||
|
||||
\end{itemize-noindent}
|
||||
}
|
||||
|
||||
%------------------------------------------------
|
||||
|
||||
\job
|
||||
{2014}{}
|
||||
{Materials Simulation Laboratory, University of South Florida, Department of Applied Physics}
|
||||
{}
|
||||
{Computational Researcher under Professor Ivan Oleynik}
|
||||
{
|
||||
\begin{itemize-noindent}
|
||||
|
||||
\item{Studied the mechanical properties, i.e. tensile strengths and stress-strain curves, of graphene bicrystals.}
|
||||
|
||||
\item{Simulated and analyzed graphene bicrystals using LAMMPS, a molecular dynamics simulation software, and the Carbon-Carbon screened environment-dependent reactive empirical bond order potential function devised by the Materials Simulation Laboratory.}
|
||||
|
||||
\item{Presented this work in an \href{http://meetings.aps.org/Meeting/MAR15/Session/D4.9}{oral presentation} during the San Antonio APS Meeting, March 2015.}
|
||||
|
||||
\end{itemize-noindent}
|
||||
}
|
||||
|
||||
%------------------------------------------------
|
||||
|
||||
\job
|
||||
{2014}{}
|
||||
{Tandem Van de Graaff Accelerator Laboratory, Department of Physics, Western Michigan University}
|
||||
{}
|
||||
{Lab Assistant for Accelerator Engineer Allan Kern}
|
||||
{
|
||||
\begin{itemize-noindent}
|
||||
|
||||
\item{Designed, printed, and constructed rack-based power source for particle accelerator's inflection magnet.}
|
||||
|
||||
\item{Used 12 synchronized latching relays and timing circuits to manage switching of polarity of the accelerator's inflection magnet.}
|
||||
|
||||
\item{Designed power source to allow sufficient current for inflection electromagnet to steer gold ions.}
|
||||
|
||||
\end{itemize-noindent}
|
||||
}
|
||||
|
||||
|
||||
%------------------------------------------------
|
||||
|
||||
\job
|
||||
{2013}{}
|
||||
{Business and Technology Department, Washtenaw Community College}
|
||||
{}
|
||||
{Lab Technician}
|
||||
{
|
||||
\begin{itemize-noindent}
|
||||
|
||||
\item{Created a Hadoop-based data analytics program for analyzing traffic data to predict more efficient bus routes, created in partnership with the Ann Arbor Transit Authority.}
|
||||
|
||||
\item{Setup and maintained virtual server environment for deployment of department and research applications.}
|
||||
|
||||
\item{Created interactive tools for coordination between technicians, faculty, and staff using RESTful API and Django.}
|
||||
|
||||
\end{itemize-noindent}
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
%------------------------------------------------------------------------------
|
||||
% Teaching
|
||||
%------------------------------------------------------------------------------
|
||||
|
||||
|
||||
\section{Teaching Experience}
|
||||
|
||||
%------------------------------------------------
|
||||
|
||||
\job
|
||||
{2019 -}{2017}
|
||||
{PHY 231: University Physics I, Department of Physics and Astronomy, University of Kentucky}
|
||||
{}
|
||||
{Teaching Assistant}
|
||||
{
|
||||
\begin{itemize-noindent}
|
||||
|
||||
\item{Provided auxiliary classroom instruction for calculus-based mechanics courses and 100-level astronomy courses.}
|
||||
|
||||
\item{Facilitated group work sessions, a scientifically-validated alternative to traditional lecture for learning introductory physics.}
|
||||
|
||||
\item{For mechanics courses, developed lesson plans each week to aid students in their solution of a weekly problem set.}
|
||||
|
||||
\item{For astronomy courses, assisted in running observational experiments during lecture, and provided guidance on student research projects.}
|
||||
|
||||
\item{Provided supplemental instruction during office hours and other special sessions.}
|
||||
|
||||
\item{Graded papers and proctored exams.}
|
||||
|
||||
\end{itemize-noindent}
|
||||
}
|
||||
|
||||
%------------------------------------------------
|
||||
|
||||
\job
|
||||
{2017 -}{2015}
|
||||
{PHY 3520: Waves and Optics, Department of Physics, Western Michigan University}
|
||||
{}
|
||||
{Learning Assistant to Professor David Schuster}
|
||||
{
|
||||
\begin{itemize-noindent}
|
||||
|
||||
\item{Developed while applying a mastery assessment-based approach to course evaluations}
|
||||
|
||||
\item{Presented continuing developments in this method to the Michigan Academy of Science, Arts, and Letters in 2016 and 2017.}
|
||||
|
||||
\item{Designed two lessons: one on the speed of sound, involving a marching band problem; another on harmonics using a brass instrument problem and a brass instrument demonstration.}
|
||||
|
||||
\item{Graded assignments and exams.}
|
||||
|
||||
\item{Provided supplemental instruction.}
|
||||
|
||||
\end{itemize-noindent}
|
||||
}
|
||||
|
||||
\job
|
||||
{2017 -}{2015}
|
||||
{PHY 2050 \& 2070: University Physics I \& II, Department of Physics, Western Michigan University}
|
||||
{}
|
||||
{Learning Assistant}
|
||||
{
|
||||
\begin{itemize-noindent}
|
||||
|
||||
\item{Provided a helpful presence under a "reversed-class" paradigm, where the in-class time is primarily used for group problem solving, and students are expected to study the lecture material outside of class. This typically means guiding students' inquiries as they attempt to synthesize physical reasoning to solve the assigned problems.}
|
||||
|
||||
\item{Conducted thrice-weekly review sessions mirroring material provided by the lecture, with an emphasis on understanding how general principles simplify under proper assumptions.}
|
||||
|
||||
\item{Solved problems designed by the professor as a test case before introducing them in group problem-solving settings.}
|
||||
|
||||
\item{Created training on critical thinking and scientific philosophy for learning assistant team members, then conducted the training.}
|
||||
|
||||
\end{itemize-noindent}
|
||||
}
|
||||
|
||||
|
||||
|
||||
\job
|
||||
{2016}{}
|
||||
{PHY 3250: Introduction to Astrophysics, Department of Physics, Western Michigan University}
|
||||
{}
|
||||
{Grader}
|
||||
{
|
||||
\begin{itemize-noindent}
|
||||
|
||||
\item{Graded homework assignments, providing student developmental feedback as needed.}
|
||||
|
||||
\end{itemize-noindent}
|
||||
}
|
||||
|
||||
|
||||
\job
|
||||
{2017 -}{2014}
|
||||
{Student Success Services, College of Arts and Sciences, Western Michigan University}
|
||||
{}
|
||||
{Peer Academic Coach}
|
||||
{
|
||||
Supported student learning as a tutor in physics, chemistry, mathematics, philosophy, computer science, and general learning.
|
||||
}
|
||||
|
||||
\job
|
||||
{2013 -}{2009}
|
||||
{Emerson School}
|
||||
{}
|
||||
{Music Tutor}
|
||||
{
|
||||
\begin{itemize-noindent}
|
||||
|
||||
\item{Conducted individual lessons with students of brass and piano performance.}
|
||||
|
||||
\end{itemize-noindent}
|
||||
}
|
||||
|
||||
|
||||
\job
|
||||
{2013 -}{2012}
|
||||
{Business and Technology Department, Washtenaw Community College}
|
||||
{}
|
||||
{Lab Assistant}
|
||||
{
|
||||
\begin{itemize-noindent}
|
||||
|
||||
\item{Assisted in various computer science courses, running review sessions, labs, tutoring, and providing general course help.}
|
||||
|
||||
\item{Developed computer-based applications used by the professors and administration for keeping track of coursework, lab setup, and faculty requests.}
|
||||
|
||||
\end{itemize-noindent}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
%------------------------------------------------------------------------------
|
||||
% Interests
|
||||
%------------------------------------------------------------------------------
|
||||
\section{Relevant Interests}
|
||||
|
||||
\begin{itemize-noindent}
|
||||
|
||||
\item{Maintain a server rack tower and computing cluster at home from repurposed equipment, as a self-guided learning tool, and for fun and profit.}
|
||||
|
||||
\item{Strong interest in the theory and application of science education, especially as to education of the public.}
|
||||
|
||||
\item{Advocate of critical thinking and scientific skepticism, with interest in bringing those topics to the public on a larger scale.}
|
||||
|
||||
\item{Active member of the Free Software Society at University of Kentucky, and Physics Club, Computer Club, and the Advanced Computing Society at Western Michigan University.}
|
||||
|
||||
\item{Coded simulations for fun since childhood. A favorite pastime has been to create network-based space simulations in which friends can dogfight, and other internet-connected simulated worlds, going back to the early days of the internet.}
|
||||
|
||||
\item{Play improv piano almost every day.}
|
||||
|
||||
\end{itemize-noindent}
|
||||
|
||||
|
||||
%----------------------------------------------------------------------------------------
|
||||
% REFERRAL SECTION
|
||||
%----------------------------------------------------------------------------------------
|
||||
|
||||
\section{Referrals}
|
||||
|
||||
\parbox{0.5\textwidth}{ % First block
|
||||
\begin{tabbing}
|
||||
\hspace{2.75cm} \= \hspace{4cm} \= \kill % Spacing within the block
|
||||
{\bf Name} \> Kirk Korista \\
|
||||
{\bf Organization} \> Western Michigan University \\
|
||||
{\bf Position} \> Professor and Chair \\
|
||||
{\bf Contact} \> \href{mailto:kirk.korista@wmich.edu}{kirk.korista@wmich.edu}
|
||||
\end{tabbing}}
|
||||
\hfill % Horizontal space between the two blocks
|
||||
\parbox{0.5\textwidth}{ % Second block
|
||||
\begin{tabbing}
|
||||
\hspace{2.75cm} \= \hspace{4cm} \= \kill % Spacing within the block
|
||||
{\bf Name} \> Zach Simmons\\
|
||||
{\bf Organization} \> Mjolnir Software \\
|
||||
{\bf Position} \> CEO \\
|
||||
{\bf Contact} \> \href{mailto:zacvausim@gmail.com}{zacvausim@gmail.com}
|
||||
\end{tabbing}}
|
||||
\hfill % Horizontal space between the two blocks
|
||||
\parbox{0.5\textwidth}{ % Second block
|
||||
\begin{tabbing}
|
||||
\hspace{2.75cm} \= \hspace{4cm} \= \kill % Spacing within the block
|
||||
{\bf Name} \> David Schuster\\
|
||||
{\bf Organization} \> Western Michigan University \\
|
||||
{\bf Position} \> Professor \\
|
||||
{\bf Contact} \> \href{mailto:david.schuster@wmich.edu}{david.schuster@wmich.edu}
|
||||
\end{tabbing}}
|
||||
|
||||
|
||||
%----------------------------------------------------------------------------------------
|
||||
|
||||
\end{document}
|
BIN
htdocs/examples/cv.pdf
Normal file
BIN
htdocs/examples/cv.pdf
Normal file
Binary file not shown.
12229
htdocs/examples/entanglement.html
Normal file
12229
htdocs/examples/entanglement.html
Normal file
File diff suppressed because one or more lines are too long
74
htdocs/examples/gaussian_fit.py
Executable file
74
htdocs/examples/gaussian_fit.py
Executable file
@ -0,0 +1,74 @@
|
||||
# Example: fitting 1-D models with astropy.modeling.
# Adapted from the astropy "Models and Fitting" documentation.
#
# Fix: the original imported numpy twice and astropy.modeling three times;
# the duplicate imports are collapsed into the single block below.

import numpy as np
import matplotlib.pyplot as plt
from astropy.modeling import models, fitting

# Using Models
#
# The astropy.modeling package defines a number of models that are collected
# under a single namespace as astropy.modeling.models. Models behave like
# parametrized functions:

g = models.Gaussian1D(amplitude=1.2, mean=0.9, stddev=0.5)
print(g)
# Model: Gaussian1D
# Inputs: ('x',)
# Outputs: ('y',)
# Model set size: 1
# Parameters:
#     amplitude mean stddev
#     --------- ---- ------
#           1.2  0.9    0.5

# Model parameters can be accessed as attributes.  (The bare expressions
# below mirror an interactive session; when run as a script they evaluate
# and discard the value.)
g.amplitude
# Parameter('amplitude', value=1.2)
g.mean
# Parameter('mean', value=0.9)
g.stddev
# Parameter('stddev', value=0.5)

# ...and can also be updated via those attributes:
g.amplitude = 0.8
g.amplitude
# Parameter('amplitude', value=0.8)

# Models can be evaluated by calling them as functions:
g(0.1)
# 0.22242984036255528
g(np.linspace(0.5, 1.5, 7))
# array([ 0.58091923, 0.71746405, 0.7929204 , 0.78415894, 0.69394278,
#         0.54952605, 0.3894018 ])


# Fitting example -------------------------------------------------------

# Generate fake data: a Gaussian bump plus noise.
np.random.seed(0)
x = np.linspace(-5., 5., 200)
y = 3 * np.exp(-0.5 * (x - 1.3)**2 / 0.8**2)
y += np.random.normal(0., 0.2, x.shape)

# Fit the data using a box (trapezoid) model.
t_init = models.Trapezoid1D(amplitude=1., x_0=0., width=1., slope=0.5)
fit_t = fitting.LevMarLSQFitter()
t = fit_t(t_init, x, y)

# Fit the data using a Gaussian (note: rebinds `g` to the fitted model).
g_init = models.Gaussian1D(amplitude=1., mean=0, stddev=1.)
fit_g = fitting.LevMarLSQFitter()
g = fit_g(g_init, x, y)

# Plot the data with the best-fit models.
plt.figure(figsize=(8, 5))
plt.plot(x, y, 'ko')
plt.plot(x, t(x), label='Trapezoid')
plt.plot(x, g(x), label='Gaussian')
plt.xlabel('Position')
plt.ylabel('Flux')
plt.legend(loc=2)

plt.show()
|
BIN
htdocs/examples/home_rack_and_lab.jpg
Normal file
BIN
htdocs/examples/home_rack_and_lab.jpg
Normal file
Binary file not shown.
After Width: | Height: | Size: 442 KiB |
361
htdocs/examples/interpolation_fix.cpp
Normal file
361
htdocs/examples/interpolation_fix.cpp
Normal file
@ -0,0 +1,361 @@
|
||||
#include "agn.hpp"
|
||||
#include "spectral_lines.hpp"
|
||||
#include "spline.h"
|
||||
|
||||
bool print_histogram=true;
|
||||
|
||||
int main(int argc, char const *argv[]) {
|
||||
std::ifstream eqwidth_table_file;
|
||||
eqwidth_table_file.open(argv[1]);
|
||||
agn::eqwidth_table jagged_table = agn::read_eqwidth_table(eqwidth_table_file);
|
||||
eqwidth_table_file.close();
|
||||
|
||||
std::cout
|
||||
<< "Interpolation Fix - "
|
||||
<< jagged_table.header[0]
|
||||
<< "\n";
|
||||
|
||||
std::cout << "Finding outliers.\n";
|
||||
agn::gridcoordlist outliers = agn::find_outliers(jagged_table);
|
||||
//agn::gridcoordlist outliers = agn::known_outliers();
|
||||
|
||||
std::cout << "Replacing outliers with interpolated values.\n";
|
||||
agn::eqwidth_table smooth_table = agn::smooth(jagged_table,outliers);
|
||||
|
||||
// Output debug info
|
||||
agn::gridcoordlist::iterator outlier_it = outliers.begin();
|
||||
while(outlier_it != outliers.end()) {
|
||||
std::cout
|
||||
<< std::setprecision(2)
|
||||
<< std::fixed
|
||||
<< "Reported smooth at: "
|
||||
<< outlier_it->first
|
||||
<< ", "
|
||||
<< outlier_it->second
|
||||
<< "\n"
|
||||
<< std::scientific
|
||||
<< "Old value: "
|
||||
<< jagged_table.value[outlier_it->first][outlier_it->second]
|
||||
<< "\t"
|
||||
<< "New value: "
|
||||
<< smooth_table.value[outlier_it->first][outlier_it->second]
|
||||
<< "\n";
|
||||
outlier_it++;
|
||||
}
|
||||
std::ofstream smoothed_table_file;
|
||||
smoothed_table_file.open(argv[2]);
|
||||
smoothed_table_file << agn::format_eqwidth_table(smooth_table);
|
||||
smoothed_table_file.close();
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Replace every outlier listed in coordlist with a value interpolated from
// the remaining (non-outlier) points of its hden row.
//
// jagged_table: the input table (not modified; a smoothed copy is returned).
// coordlist:    (hden, phi) grid coordinates of points to replace.
// Returns a copy of jagged_table with each listed point re-interpolated.
agn::eqwidth_table agn::smooth(agn::eqwidth_table & jagged_table,agn::gridcoordlist coordlist) {
    if(agn::debug) std::cout
        << "Received Coordinate list with "
        << coordlist.size()
        << " items.\n";

    agn::eqwidth_table smooth_table = agn::eqwidth_table(jagged_table);
    agn::gridcoordlist::iterator coord_it = coordlist.begin();
    while(coord_it != coordlist.end() ) {
        double x = coord_it->first;   // hden of the outlier
        double y = coord_it->second;  // phi of the outlier
        if(agn::debug) std::cout
            << "Smoothing at hden="
            << x
            << ", phi="
            << y
            << ":\t";
        // Work on a copy of the full row at this hden.
        agn::table1d slice = agn::table1d(smooth_table.value[x]);

        double oldx = y;
        double oldy = slice[oldx];

        std::vector<double> X;
        std::vector<double> Y;

        // Collect the phi coordinates of every outlier in this same row;
        // all of them must be excluded from the interpolation input.
        std::set<double> omissions;
        agn::gridcoordlist::iterator outlier_it = coordlist.begin();
        while ( outlier_it != coordlist.end() ) {
            if ( outlier_it->first == coord_it->first ) {
                omissions.insert(outlier_it->second);
            }
            outlier_it++;
        }

        // Build the spline input from the non-outlier points.
        // BUG FIX: the previous scan advanced the slice iterator inside the
        // inner omissions loop, which could dereference slice.end() and
        // could fail to skip consecutive omitted points.  A set lookup
        // avoids both problems.
        agn::table1d::iterator slit = slice.begin();
        while ( slit != slice.end() ) {
            if ( omissions.count(slit->first) ) {
                ++slit;  // outlier point: leave it out of the fit
                continue;
            }
            X.push_back(slit->first);
            Y.push_back(slit->second);
            ++slit;
        }

        agn::Spline<double,double> interpolator(X,Y);

        slit = slice.find(y);

        double newx = slit->first;
        double newy = interpolator[newx];

        // Sanity check: the looked-up grid point must be the one requested.
        if ( newx != oldx ) {
            std::cerr << "Problem smoothing.\n";
            coord_it++;
            continue;
        }

        smooth_table.value[x][y] = newy;

        if ( agn::debug ) {
            std::cout
                << std::scientific
                << "Smoothed: "
                << newx
                << ", "
                << oldy
                << " -> "
                << newy
                << "\n";
        }

        coord_it++;
    }

    if(agn::debug) std::cout << "\n";
    return smooth_table;
}
|
||||
|
||||
// Fallback list of grid points known (from manual inspection of the
// colden23 18/15/15 run) to have diverged.  Used in place of
// find_outliers() when automatic detection is disabled.
agn::gridcoordlist agn::known_outliers() {
    // (hden, phi) coordinates of the hand-identified diverged points.
    static const double diverged[][2] = {
        {10.25, 20.0 },
        {10.5 , 20.25},
        {11.25, 20.75},
        {11.5 , 21.0 },
        {11.5 , 22.0 },
        {11.75, 21.25},
        {12.0 , 21.5 },
        {12.5 , 22.0 },
        {12.5 , 22.25},
        { 9.75, 19.75},
    };
    const std::size_t count = sizeof(diverged) / sizeof(diverged[0]);

    agn::gridcoordlist colden23_181515_diverged_points;
    // push_front in table order reproduces the original reversed list.
    for (std::size_t i = 0; i < count; ++i) {
        colden23_181515_diverged_points.push_front(
            std::make_pair(diverged[i][0], diverged[i][1]));
    }
    return colden23_181515_diverged_points;
}
|
||||
|
||||
// Scan every hden row of the table for anomalous (diverged) grid points.
//
// For each row the expected shape is a monotonically declining curve of
// log10(eqwidth) versus phi.  The algorithm:
//   1. crawls left-to-right collecting a clean "reference" curve until the
//      slope turns anomalous (> -0.05 in log space);
//   2. crawls onward to find where the curve becomes clean again
//      ("curve_back"), stopping at the table's floor value;
//   3. fits a spline through reference + tail points, and
//   4. marks every point inside the anomalous window whose distance from
//      the spline exceeds a ratio-based threshold.
//
// Fixes relative to the previous revision:
//   * fabs(y2 - y1) replaces the equivalent but wasteful sqrt(pow(...,2));
//   * the loop bound ++(slice.find(anomalies_end_x)) was recomputed every
//     iteration (and incremented a temporary) — it is now hoisted;
//   * the loop-invariant threshold is computed once per window.
agn::gridcoordlist agn::find_outliers(agn::eqwidth_table jagged_table) {
    if (agn::debug) std::cout
        << "Scanning for outliers.\n";
    agn::gridcoordlist outliers;

    // The distribution is parameterised by x=log(jagged_table.hden) and
    // y=log(jagged_table.eqwidth).

    agn::iterator2d hden_it = jagged_table.value.begin();
    while ( hden_it != jagged_table.value.end() ) {
        agn::table1d slice = hden_it->second;
        if( agn::debug ) std::cout
            << "\nhden= "
            << (hden_it->first)
            << ": ";

        // Crawl, checking slope, until a positive slope is found.
        agn::table1d::iterator phi_it = slice.begin();
        double x1,x2,y1,y2,slope;
        std::vector<double> ref_curve_x,ref_curve_y,curve_back_x,curve_back_y;
        double anomalies_start_x,anomalies_end_x;
        x1 = phi_it->first;
        y1 = log10(phi_it->second);
        ref_curve_x.push_back(x1);
        ref_curve_y.push_back(y1);
        phi_it++;
        // A rise at the very left edge is treated specially: the reference
        // curve restarts after it rather than marking it anomalous.
        bool beginning_rise = true;
        while ( phi_it != slice.end() ) {
            x2 = phi_it->first;
            y2 = log10(phi_it->second);
            slope = (y2 - y1)/(x2 - x1);
            if (slope > -0.05 ) {
                if (agn::debug) std::cout
                    << "Found anomaly(x="
                    << x1
                    << ";slope="
                    << slope
                    << "), ";
                if ( beginning_rise ) {
                    if (agn::debug) std::cout
                        << "Found left-wise rise, ";
                    // Skip past the initial rise entirely and restart the
                    // reference curve at its end.
                    while ( slope > -0.05 ) {
                        x1 = phi_it->first;
                        y1 = log10(phi_it->second);
                        phi_it++;
                        if ( phi_it == slice.end() ) break;
                        x2 = phi_it->first;
                        y2 = log10(phi_it->second);
                        slope = (y2 - y1)/(x2 - x1);
                    }
                    ref_curve_x.clear();
                    ref_curve_y.clear();
                    ref_curve_x.push_back(x1);
                    ref_curve_y.push_back(y1);
                }
                else {
                    // Interior anomaly: drop the last (suspect) reference
                    // point and stop crawling.
                    ref_curve_x.pop_back();
                    ref_curve_y.pop_back();
                    phi_it++;
                    break;
                }
            }
            if ( slope < -0.05 && ( slice.find(x1) != slice.begin())) beginning_rise = false;
            x1 = x2;
            y1 = y2;
            ref_curve_x.push_back(x1);
            ref_curve_y.push_back(y1);
            phi_it++;
        }

        // Reached the end with no interior anomaly: row is clean.
        if ( phi_it == slice.end() ) {
            if (agn::debug) std::cout
                << "Clean";
            hden_it++;
            continue;
        }

        // Crawl onward to find the clean tail after the anomalous region.
        x1 = phi_it->first;
        y1 = log10(phi_it->second);
        curve_back_x.push_back(x1);
        curve_back_y.push_back(y1);
        phi_it++;
        while ( phi_it != slice.end() ) {
            x2 = phi_it->first;
            y2 = log10(phi_it->second);
            slope = (y2 - y1)/(x2 - x1);
            if (slope > 0 ) {
                // Still rising: everything collected so far is suspect.
                curve_back_x.clear();
                curve_back_y.clear();
            }
            if ( y2 <= EQWIDTH_MIN_VAL_LOG ) {
                break;  // hit the table's floor value
            }
            x1 = x2;
            y1 = y2;
            curve_back_x.push_back(x1);
            curve_back_y.push_back(y1);
            phi_it++;
        }

        if ( curve_back_x.size() < 2 ) {
            if(agn::debug) std::cout
                << "Found too few end points for spline ("
                << curve_back_x.size()
                << ")";
            hden_it++;
            continue;
        }

        // Trim the edge points nearest the anomaly so the spline is anchored
        // on trusted data only.
        if (ref_curve_x.size() > 2) {
            ref_curve_x.erase(ref_curve_x.begin());
            ref_curve_y.erase(ref_curve_y.begin());
            ref_curve_x.pop_back();
            ref_curve_y.pop_back();
        }
        if (curve_back_x.size() > 2) {
            curve_back_x.erase(curve_back_x.begin());
            curve_back_y.erase(curve_back_y.begin());
        }
        anomalies_start_x = *ref_curve_x.rbegin();
        anomalies_end_x = *curve_back_x.begin();

        if (agn::debug) std::cout
            << "Anomalous region between x coords "
            << anomalies_start_x
            << " and "
            << anomalies_end_x
            << ", ";

        // Append the tail curve to the reference curve for the spline fit.
        std::vector<double>::iterator curve_back_x_it = curve_back_x.begin();
        std::vector<double>::iterator curve_back_y_it = curve_back_y.begin();
        while ( curve_back_x_it != curve_back_x.end() &&
                curve_back_y_it != curve_back_y.end() ) {
            ref_curve_x.push_back(*curve_back_x_it);
            ref_curve_y.push_back(*curve_back_y_it);
            curve_back_x_it++;
            curve_back_y_it++;
        }

        if (agn::debug) std::cout
            << "Creating Spline with "
            << ref_curve_x.size()
            << " points, ";

        // Creating spline from vectors
        agn::Spline<double,double> testing_spline(ref_curve_x,ref_curve_y);

        // Calculate sigma from the reference curve.
        // NOTE: sigma currently feeds only the disabled sigma-based
        // threshold (threshold = sigma*SIGMA_THRESHOLD_MULTIPLIER); it is
        // retained so that alternative can be re-enabled.
        std::vector<double>::iterator ref_curve_y_it = ref_curve_y.begin();
        double sigma = 0;
        double mean = 0;
        int num = ref_curve_y.size();
        double sum = 0;
        while ( ref_curve_y_it != ref_curve_y.end() ) {
            mean += *ref_curve_y_it;
            ref_curve_y_it++;
        }
        mean /= num;
        ref_curve_y_it = ref_curve_y.begin();
        while ( ref_curve_y_it != ref_curve_y.end() ) {
            sum += pow(*ref_curve_y_it - mean,2);
            ref_curve_y_it++;
        }
        sigma = sqrt(sum / num);

        // Interpolate all values in the anomalous x range and record any
        // point whose log-space distance from the spline exceeds the
        // ratio-based threshold.
        double threshold = 2*log(1 + RATIO_THRESHOLD_MULTIPLIER);
        agn::table1d::iterator anomalies_end_it = slice.find(anomalies_end_x);
        ++anomalies_end_it;  // loop bound is one past the window's end
        phi_it = slice.find(anomalies_start_x);
        while (phi_it != anomalies_end_it) {
            y1 = log10(phi_it->second);
            x1 = phi_it->first;
            y2 = testing_spline[x1];
            double absdiff = fabs(y2 - y1);
            if(agn::debug) std::cout
                << "Checking "
                << "x="
                << x1
                << "("
                << absdiff
                << " > "
                << threshold
                << "), ";
            if ( absdiff > threshold) {
                if(agn::debug) std::cout
                    << "MARKED, ";
                std::pair<double,double> outlier = std::make_pair(hden_it->first,phi_it->first);
                outliers.push_back(outlier);
            }
            phi_it++;
        }
        if(agn::debug) std::cout << "\n";
        hden_it++;
    }

    if (agn::debug) std::cout << "\n";
    return outliers;
}
|
BIN
htdocs/examples/lab.png
Normal file
BIN
htdocs/examples/lab.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 422 KiB |
54
htdocs/examples/package_tables.sh
Normal file
54
htdocs/examples/package_tables.sh
Normal file
@ -0,0 +1,54 @@
|
||||
#!/usr/bin/env bash

# Package the fort files for one grid run into fortfiles_<grid_id>.tar.gz.
#
# Usage: package_tables.sh <grid directory>
#
# This still isn't really generalized.  Run it from a directory of runs and
# feed it the grid (column-density) directory to package.
#
# Fixes: added a missing-argument guard (previously an empty $1 let the
# script keep running in the wrong directory), removed a duplicate grid_id
# computation, and quoted all path expansions.

# Resolve the scripts directory regardless of where the script is invoked.
script_dir=$(
	cd "$(dirname "$0")" ;
	pwd -P | sed 's@^\(.*\)/scripts.*@\1/scripts@'
)
base_dir=$(pwd)
bin_dir="$script_dir/../bin"

griddir=$1
if [[ -z "$griddir" ]]; then
	echo "Usage: $0 <grid directory>"
	exit 2
fi

# Derive a dotted grid id from the directory path,
# e.g. ./a/b.grids/c -> a.b.c
grid_id=$(echo ${griddir}|sed 's@^\.\/\(.*\)@\1@'|sed 's@.grids@@'|sed 's@\/@.@g')

if [[ -e "fortfiles_${grid_id}.tar.gz" ]]; then
	echo "Package fortfiles_${grid_id}.tar.gz exists."
	exit 5
else
	echo "Setting up to compile fortfiles_${grid_id}.tar.gz."
fi

cd "$griddir" || exit 4

if [[ ! -e "mpi_grid.out" ]]; then
	echo "mpi_grid.out doesn't exist"
	cd "$base_dir"
	exit 6
fi

mkdir -p fortfiles
cd fortfiles || exit 4

# Bring the grid input and tables along for the archive.
cp ../mpi_grid.in .
cp ../*.tab .

echo "Directory ready. Calling fort file creation."
"$bin_dir/create_fort_files" ../mpi_grid.out "$script_dir/../reference/linelist.c17"

echo "Applying interpolative smoothing."
"$script_dir/operations/bulk_interpolation_fix.sh" fort.* > interpolation_report

# (grid_id was previously recomputed here with an identical expression;
# the value cannot have changed, so the recomputation was dropped.)
echo "Saving $grid_id grid"
tar cf "fortfiles_${grid_id}.tar" *
gzip "fortfiles_${grid_id}.tar"
mv "fortfiles_${grid_id}.tar.gz" "$base_dir"

cd "$base_dir"
|
57
htdocs/examples/plot.sh
Executable file
57
htdocs/examples/plot.sh
Executable file
@ -0,0 +1,57 @@
|
||||
#!/usr/bin/env bash

# This metascript uses the available plotting scripts to produce a list of
# document-ready plots.
#
# Usage: plot.sh <plot type>
#   <plot type> is one of:  psd | lags | tophat
#
# For the psd and lags cases, a gnuplot template under scripts/templates/
# is flattened to one line, instantiated once per data table (substituting
# %FILE% and %LABEL% placeholders), re-expanded, and run.

# Only analyses with this error type will be represented in the atlas
errtype=$(cat err_type)
ref_band="1367Å"

case $1 in
"PSD"|"psd"|"PSDs"|"PSDS"|"psds")
	gnuplot_file=psd_atlas.gp
	scripts/propagate_tables.sh
	# Flatten the template to a single line so sed can edit it whole.
	gnuplot_input=$(cat scripts/templates/${gnuplot_file}|perl -pe 's|\n||g')
	for tabfile in analyses/tables/PSD_*${errtype}*.tab;
	do
		# Extract the 5-character band label from the table filename.
		echo_band=$(basename $tabfile|
			sed 's|PSD[_ ]\(.\{5\}\)[_ ]{[^_ ]*}.tab|\1|')
		# Skip the reference band itself.
		if [[ "$echo_band" == "$ref_band" ]] ; then continue; fi
		gnuplot_input_edit=$(echo "$gnuplot_input"|
			sed "s|%FILE%|$tabfile|"|
			sed "s|%LABEL%|$echo_band|")
		gnuplot_input="${gnuplot_input_edit}"
	done
	# NOTE(review): 's||\n|g' uses an empty pattern, which in Perl reuses
	# the last successful match (none in a fresh process).  It looks like a
	# placeholder/separator character may have been lost in transfer —
	# confirm against the original script before relying on this.
	echo "$gnuplot_input"|perl -pe 's||\n|g' > ${gnuplot_file}
	gnuplot $gnuplot_file
	;;

"lags"|"lag"|"delay"|"delays")
	gnuplot_file=timelag_atlas.gp
	gnuplot_input=$(cat scripts/templates/${gnuplot_file}|perl -pe 's|\n||g')
	scripts/propagate_tables.sh
	for tabfile in analyses/tables/timelag_*${errtype}*.tab;
	do
		# NOTE(review): the '≺' characters below appear to be a literal
		# delimiter in the timelag filenames (reference ≺ echo band) —
		# verify they survived the transfer intact.
		ref_band_extracted=$(basename $tabfile|sed 's|timelag_\([^≺]*\)[_ ]≺[_ ][^≺_ ]*[_ ]{[^_ ]*}.tab|\1|')
		echo_band=$(basename $tabfile|sed 's|timelag_[^≺]*[_ ]≺[_ ]\([^≺_ ]*\)[_ ]{[^_ ]*}.tab|\1|')
		if [[ "$echo_band" == "$ref_band" ]] ; then continue; fi
		gnuplot_input_edit=$(echo "$gnuplot_input"|
			sed "s|%FILE%|$tabfile|"|
			sed "s|%LABEL%|$echo_band|")
		gnuplot_input="${gnuplot_input_edit}"
	done
	echo "$gnuplot_input"|perl -pe 's||\n|g' > ${gnuplot_file}
	gnuplot $gnuplot_file
	;;

"tophat"|"th")
	# Top-hat transfer-function plots; tables are generated by the perl
	# helper, then fixed gnuplot templates are run directly.
	mkdir -p analyses/tables/
	scripts/tophat_fft.pl
	gnuplot scripts/templates/tophat_freqdomain.gp
	gnuplot scripts/templates/tophat_timedomain.gp
	;;

*)
	echo "Did not understand plot type."
	;;
esac
|
88
htdocs/examples/populate_bicrystals.sh
Executable file
88
htdocs/examples/populate_bicrystals.sh
Executable file
@ -0,0 +1,88 @@
|
||||
# This program will create graphene bicrystal unit cells that can constitute
# an infinite lattice when periodic boundary conditions are applied (in 2d).

# Usage: ./populate_bicrystals.sh <root dir> [source dir]

# The program will create a unit cell from any POSCAR files found under
# [source dir] (or pwd if [source dir] is not specified). It will create a
# new directory structure under <root dir>, maintaining naming and directory
# conventions. The program uses LAMMPS sample minimization and simulation box
# relaxation to create the lowest possible energy state from the given lattice.

# The program is meant to be used to minimize samples created by Kien in the
# MSL. The program should minimize all samples and output unit cells that can
# then be used in future LAMMPS PBC simulations.

# The program is organized into three stages: preparation, minimization, and
# analysis.

# In stage 1, the program will find all files ending in .POSCAR in any
# subdirectories of user's current pwd, convert them to jmol cartesian
# snapshots, then populate a new directory tree under <root dir> with
# these snapshots. The program will then convert each jmol snapshot to a
# LAMMPS input file and shrink the initial box around the sample. The
# program will also populate an info file for each sample, and store at
# this time the grain boundary mismatch angle in that file. The current info
# file format as of 2014 July 19 is:

# All angles in degrees
# All energies in eV (presuming metal units in LAMMPS)

# <sample dir>/<sample name>
# ==============
# <misorientation angle>
# <num atoms> atoms
# <formation energy> - <formation energy of pristine graphene>
# <initial box dimensions>
# <final box dimensions>
# Coordinations
# -------------
# 4: # atoms
# 3: # atoms
# 2: # atoms
# 1: # atoms

# In stage 2, the program will run relax_sample_and_box.sh on each sample
# from stage 1, thus creating a unit cell for PBC from each sample.

# In stage 3, the program will then check bond lengths for each sample and
# report any outliers in that sample's info file. Next, a JMOL snapshot will
# be generated. Finally, the system's change in energy over the minimization
# will be calculated and stored in the info file. A report is then produced
# in <root dir> containing a print of all info files.

# Resolve the scripts/ directory regardless of where this program is invoked
# from (strips everything after the first ".../scripts" path component).
script_dir=$( cd $(dirname $0) ; pwd -P |sed 's@^\(.*\)/scripts.*@\1/scripts@')
bin_dir="$script_dir/../bin"
root_dir=$1
# Default the POSCAR search root to the caller's pwd when [source dir] is
# not given.
if [[ -z $2 ]]; then source_dir=$(pwd); else source_dir=$2; fi

source $script_dir/func/utility.src

# (Stage 1) -- Preparation

$script_dir/bicrystal/stage1.sh $root_dir $source_dir
echo -e "\nStage 1 complete. \n"

# (Stage 2) -- Minimization

$script_dir/bicrystal/stage2.sh $root_dir
echo -e "Stage 2 complete.\n"

# (Stage 3) -- Analysis
# Stage 3 is currently run by hand; the direct invocation is kept below,
# commented out, for when it is re-enabled.

echo "Please run stage 3 manually: scripts/bicrystal/stage3.sh $root_dir"

# $script_dir/bicrystal/stage3.sh $root_dir
# echo -e "Stage 3 complete.\n"

echo Program complete.
|
206
htdocs/examples/process_output.pl
Executable file
206
htdocs/examples/process_output.pl
Executable file
@ -0,0 +1,206 @@
|
||||
#!/usr/bin/env perl
# -*- coding: utf-8 -*-

# This extracts the gnuplot-ready tables from a psdlag analysis
# and saves them to temporary files. propagate_tables.sh uses this
# script to create the tables it saves to analyses/tables.
#
# BUG FIXES relative to the previous revision:
#  * shebang moved to line 1 so it actually takes effect;
#  * $charset is now declared before its first use;
#  * the cross-spectrum hash entries now read the populated
#    @crsspctrm_psd arrays (previously @crosssp_psd, never filled);
#  * removed a stray close($outputfile) on a handle that is never opened.

use utf8;
use Encode qw(encode decode);
use feature 'say';
use locale;
use Switch;

use constant PI => 4 * atan2(1, 1);

# Enables various levels of output.
our $verbose=0;
our $debug=0;

if ($debug) { $verbose=1; }

# $ARGV[0] takes the echo wavelength for use in filenames.
$echo_λ = $ARGV[0];
$ref_λ = "1367A";

# Output charset follows the environment locale.
my $charset=$ENV{LANG};

# The λ/σ/μ identifiers and labels below assume a UTF-8 locale; warn
# when that is not the case.
if (!${^UTF8LOCALE}) {
	say encode($charset,"You are not using UTF-8 encoding. :(");
}

my $echopsd_tabfile="data/tables/psd_${echo_λ}.tab";
my $refpsd_tabfile="data/tables/psd_${ref_λ}.tab";
my $timelag_tabfile="data/tables/lag_${ref_λ}_${echo_λ}.tab";

# This program attempts to fill function_bin with the tabulated
# PSDs and time lags, keyed by bin-center frequency.
our %function_bin = ();

open $freq_file,'<',"freq.out" or die $!;
open $ref_psd_file,'<',"ref_psd.out" or die $!;
open $echo_psd_file,'<',"echo_psd.out" or die $!;
open $crsspctrm_file,'<',"crsspctrm.out" or die $!;
open $timelag_file,'<',"lag.out" or die $!;

=pod

This section collects the various quantities. Each *.out file carries
whitespace-separated rows: the means on the first line and the 1-sigma
errors on the second (freq.out carries the bin boundaries only).

=cut

@bin_bounds = split /\s/,<$freq_file>;
@ref_psd = split /\s/,<$ref_psd_file>;
@ref_psd_σ = split /\s/,<$ref_psd_file>;
@echo_psd = split /\s/,<$echo_psd_file>;
@echo_psd_σ = split /\s/,<$echo_psd_file>;
@crsspctrm_psd = split /\s/,<$crsspctrm_file>;
@crsspctrm_psd_σ = split /\s/,<$crsspctrm_file>;
@timelag = split /\s/,<$timelag_file>;
@timelag_σ = split /\s/,<$timelag_file>;

if ($debug) {
	print encode($charset,"Found bin boundaries: ");
	foreach (@bin_bounds) {print encode($charset,"$_ ");}
	say encode($charset," ");
}

# Convert consecutive bin boundaries into (center μ, half-width Δ)
# pairs and attach each measured quantity to its bin. The first
# boundary only seeds $lower_bound, so $count stays aligned with the
# per-bin rows of the other files.
my $count = 0;
my $upper_bound = 0;
my $lower_bound = 0;
foreach (@bin_bounds) {
	$lower_bound = $upper_bound;
	$upper_bound = $_;
	if ($lower_bound == 0) {next}
	my $μ = ($upper_bound + $lower_bound)/2;
	my $Δ = ($upper_bound - $lower_bound)/2;
	$function_bin{$μ} = {"Δ" => $Δ,
		"ref_PSD_μ" => $ref_psd[$count],
		"ref_PSD_σ" => $ref_psd_σ[$count],
		"echo_PSD_μ" => $echo_psd[$count],
		"echo_PSD_σ" => $echo_psd_σ[$count],
		"crsspctrm_μ" => $crsspctrm_psd[$count],
		"crsspctrm_σ" => $crsspctrm_psd_σ[$count],
		"timelag_μ" => $timelag[$count],
		"timelag_σ" => $timelag_σ[$count]};
	$count = $count + 1;
}

close $freq_file;
close $ref_psd_file;
close $echo_psd_file;
close $crsspctrm_file;
close $timelag_file;

$numbins = keys %function_bin;
if ($debug) { say encode($charset,
	"$numbins frequency bins captured in output.");
}

if($verbose) {
	say encode($charset,"freq μ (lin) freq σ ".
		"ref_PSD_μ ref_PSD_σ ".
		"echo_PSD_μ echo_PSD_σ ".
		"timelag_μ timelag_σ");

	foreach (sort { $a <=> $b } keys %function_bin) {
		say encode($charset,
			sprintf("%f %f %f %f %f ".
				"%f %f %f ",
				$_,
				$function_bin{$_}{"Δ"},
				$function_bin{$_}{"ref_PSD_μ"},
				$function_bin{$_}{"ref_PSD_σ"},
				$function_bin{$_}{"echo_PSD_μ"},
				$function_bin{$_}{"echo_PSD_σ"},
				$function_bin{$_}{"timelag_μ"},
				$function_bin{$_}{"timelag_σ"}
			));
	}
}

say encode($charset,"");

if($debug) {
	while (each %function_bin ) {
		print encode($charset,$_ . ": ");
		while ( my ($key,$value) = each %{$function_bin{$_}} ) {
			print encode($charset,$key . " => " . $value . "; ");
		}
		say encode($charset,"");
	}
}

# Reference-band PSD table: <freq> <psd> <freq half-width> <psd sigma>.
open($datafile,'>',$refpsd_tabfile) or die $!;
while( each %function_bin) {
	say $datafile
		$_ . " " .
		$function_bin{$_}{"ref_PSD_μ"} . " " .
		$function_bin{$_}{"Δ"} . " " .
		$function_bin{$_}{"ref_PSD_σ"};
}
close($datafile);

# Echo-band PSD table, same layout.
open($datafile,'>',$echopsd_tabfile) or die $!;
while( each %function_bin) {
	say $datafile
		$_ . " " .
		$function_bin{$_}{"echo_PSD_μ"} . " " .
		$function_bin{$_}{"Δ"} . " " .
		$function_bin{$_}{"echo_PSD_σ"};
}
close($datafile);

# cross spectrum not needed
# open($datafile,'>',"tmp.crsspctrm") or die $!;
# while( each %function_bin) {
# 	say $datafile
# 		$_ . " " .
# 		$function_bin{$_}{"crsspctrm_PSD_μ"} . " " .
# 		$function_bin{$_}{"Δ"} . " " .
# 		$function_bin{$_}{"crsspctrm_PSD_σ"};
# }
# close($datafile);

# Time-lag table, same layout.
open($datafile,'>',$timelag_tabfile) or die $!;
while( each %function_bin) {
	say $datafile
		$_ . " " .
		$function_bin{$_}{"timelag_μ"} . " " .
		$function_bin{$_}{"Δ"} . " " .
		$function_bin{$_}{"timelag_σ"};
}
close($datafile);

# Combined table (bin edges, echo PSD, lag) in the layout used for
# Cackett-style comparison plots.
open($datafile,'>',"data/tables/cackett_" . $echo_λ . ".tab") or die $!;
say $datafile encode($charset,
	sprintf("#freqmin freqmax psd ".
		"psd error lag lag error "
	));
foreach ( sort {$a <=> $b} keys %function_bin ) {
	say $datafile encode($charset,
		sprintf("%e %e %e %e %e %e",
			($_ - $function_bin{$_}{"Δ"}),
			($_ + $function_bin{$_}{"Δ"}),
			$function_bin{$_}{"echo_PSD_μ"},
			$function_bin{$_}{"echo_PSD_σ"},
			$function_bin{$_}{"timelag_μ"},
			$function_bin{$_}{"timelag_σ"}
		));
}
close($datafile);
|
BIN
htdocs/examples/rack.jpg
Normal file
BIN
htdocs/examples/rack.jpg
Normal file
Binary file not shown.
After Width: | Height: | Size: 142 KiB |
BIN
htdocs/examples/rack1.jpg
Normal file
BIN
htdocs/examples/rack1.jpg
Normal file
Binary file not shown.
After Width: | Height: | Size: 4.8 MiB |
BIN
htdocs/examples/rack2.jpg
Normal file
BIN
htdocs/examples/rack2.jpg
Normal file
Binary file not shown.
After Width: | Height: | Size: 5.1 MiB |
46
htdocs/examples/run_analysis.sh
Executable file
46
htdocs/examples/run_analysis.sh
Executable file
@ -0,0 +1,46 @@
|
||||
#!/usr/bin/env bash

# This script is meant to encapsulate all functions in proper order to produce
# the analyses. Parameters that this script should control: Δt, σ type, input
# dataset(lightcurve directory), more?

# Reference band and error type are read from marker files in the pwd.
ref_band=$(cat ref_band)
ref_curve="lightcurves/${ref_band}.lc"
err_str=$(cat err_type)

mkdir -p analyses/tables

for echo_curve in lightcurves/*
do
	# Grab and determine labels of analyses, skip if over the same band.
	echo_band=$(basename $echo_curve|sed 's|\(.*\)\.lc|\1|')
	if [[ $ref_band == $echo_band ]]; then continue; fi

	echo "Analysing $echo_band ≺ $ref_band."

	# Reuse archived results for this band pair when present; otherwise run
	# the (slow) python analysis and archive its *.out products.
	if [[ -e "analyses/${ref_band}_≺_${echo_band}/" ]]; then
		echo "Results already exists. Create tables from stored results."
		cp analyses/${ref_band}_≺_${echo_band}/*.out .
	else
		time python scripts/analyze_lightcurve.py $ref_curve $echo_curve >> ${echo_curve}.log
		mkdir -p "analyses/${ref_band}_≺_${echo_band}"
		cp *.out analyses/${ref_band}_≺_${echo_band}/
	fi

	echo "Tabling PSD and time lags referred to ${ref_band} for $echo_band{${err_str}}."

	# Propagate tables into analyses/tables
	echoPSD_tabfile=analyses/tables/PSD_${echo_band}_\{${err_str}\}.tab
	refPSD_tabfile=analyses/tables/PSD_${ref_band}_\{${err_str}\}.tab
	timelag_tabfile=analyses/tables/timelag_${ref_band}_≺_${echo_band}_\{${err_str}\}.tab

	# Output curves to temporary files using perl script, move tables to
	# permanent location. This just assumes there are no conflicts.
	scripts/extract_tables.pl
	mv tmp.echoPSD $echoPSD_tabfile
	mv tmp.refPSD $refPSD_tabfile
	mv tmp.timelag $timelag_tabfile
	rm *.out
done

rm tmp.*
|
16
htdocs/examples/save_table2d_slice.cpp
Normal file
16
htdocs/examples/save_table2d_slice.cpp
Normal file
@ -0,0 +1,16 @@
|
||||
// Prints a 2d table of an emission flux read from a
// file (arg1), as a function of phi for a constant
// hden (arg 2). Table is printed to a file (arg 3).

#include "spectral_lines.hpp"

int main(int argc, char const *argv[])
{
	// Validate the argument count before touching argv: the previous
	// revision dereferenced argv[1..3] unconditionally.
	if (argc < 4) {
		std::cerr << "Usage: " << argv[0]
		          << " <table file> <hden> <output file>\n";
		return 1;
	}
	std::ifstream filein(argv[1]);
	if (!filein) {
		std::cerr << "Could not open input table: " << argv[1] << "\n";
		return 1;
	}
	agn::eqwidth_table input_table = agn::read_eqwidth_table(filein);
	filein.close();
	std::ofstream fileout(argv[3]);
	// NOTE(review): atof(argv[2]) must exactly equal a stored hden key;
	// operator[] silently inserts an empty slice otherwise — confirm the
	// grid keys are written with matching precision.
	fileout << agn::format_table1d(input_table.value[atof(argv[2])]);
	return 0;
}
|
||||
|
518
htdocs/examples/sed.hpp
Normal file
518
htdocs/examples/sed.hpp
Normal file
@ -0,0 +1,518 @@
|
||||
#ifndef sed_hpp
|
||||
#define sed_hpp
|
||||
|
||||
|
||||
|
||||
#include "agn.hpp"
|
||||
#include "spline.h"
|
||||
|
||||
namespace agn {
|
||||
|
||||
// Cloudy's continuum domain, for reference
|
||||
// Pulled from cloudy 17.00, first version
|
||||
// rfield.emm = 1.001e-8f;
|
||||
// rfield.egamry = 7.354e6f;
|
||||
const double CLOUDY_EMM = 1.001e-8; // in Rydberg
|
||||
const double CLOUDY_EGAMRY = 7.354e6; // in Rydberg
|
||||
const double CLOUDY_MIN_EV=CLOUDY_EMM*RYDBERG_UNIT_EV;
|
||||
const double CLOUDY_MAX_EV=CLOUDY_EGAMRY*RYDBERG_UNIT_EV;
|
||||
|
||||
// Continuum domain, step size constant in log space
|
||||
const double CONT_MIN_ENERGY=CLOUDY_MIN_EV; // eV
|
||||
const double CONT_MAX_ENERGY=CLOUDY_MAX_EV; // eV
|
||||
const double CONT_MIN_LOGX=log10(CONT_MIN_ENERGY);
|
||||
const double CONT_MAX_LOGX=log10(CONT_MAX_ENERGY);
|
||||
const double CONT_WIDTH_LOGX=CONT_MAX_LOGX - CONT_MIN_LOGX;
|
||||
const double CONT_MIN_VAL=1e-35;
|
||||
|
||||
|
||||
const double IN_EV_2500A=12398.41929/2500;
|
||||
|
||||
|
||||
// SEDs are represented by 2d histogram tables.
|
||||
struct sed_table {
|
||||
std::string header;
|
||||
table1d table;
|
||||
};
|
||||
|
||||
// To account for the four main powerlaws in a typical
|
||||
// AGN SED.
|
||||
|
||||
// Hardcoded infrared and gamma ray power laws and cutoffs.
|
||||
const double IR_POWER = 3;
|
||||
const double GAMMA_POWER = -5;
|
||||
const double RADIO_CUTOFF = 1e-4; // IN KEV
|
||||
const double GAMMA_CUTOFF = 1e4; // IN KEV
|
||||
|
||||
struct powerlaw_bounds {
|
||||
double ir_min;
|
||||
double ir_max;
|
||||
double uv_min;
|
||||
double uv_max;
|
||||
double xray_min;
|
||||
double xray_max;
|
||||
double gamma_min;
|
||||
double gamma_max;
|
||||
};
|
||||
|
||||
// A single powerlaw segment f(x) = _normal * x^_power, with exponential
// rolloffs applied at evaluation time beyond the hard-coded GAMMA_CUTOFF
// (high end) and RADIO_CUTOFF (low end).
class powerlaw {
private:
	// f(x) = _normal*x^_power
	double _power;
	double _normal;
public:
	powerlaw(): _power(0), _normal(0) {}
	// Fit through two points: straight line in log-log space.
	// NOTE: _normal's initializer reads _power, so the declaration order
	// (_power before _normal) is load-bearing.
	powerlaw(coord2d x0,coord2d x1):
		_power((log(x1.second)-log(x0.second))/(log(x1.first)-log(x0.first))),
		_normal(exp(log(x0.second)-(_power*log(x0.first))))
	{}
	// Fit through one point with a prescribed slope.
	powerlaw(coord2d x0,double power):
		_power(power),
		_normal(exp(log(x0.second)-(_power*log(x0.first))))
	{}
	// Evaluate at photon energy hnu, damped exponentially above the gamma
	// cutoff and below the radio cutoff.
	double eval(double hnu) {
		return
			_normal
			* pow(hnu,_power)
			* exp(-(hnu)/GAMMA_CUTOFF)
			* exp(-(RADIO_CUTOFF/hnu))
		;
	}
};
|
||||
|
||||
class sed {
|
||||
public:
|
||||
// Continuum output functions
|
||||
// Returns histogram with n bins evenly space in log space
|
||||
sed_table histogram_table(int n);
|
||||
|
||||
// Argument is photon energy in eV
|
||||
virtual double eval(double hnu) {};
|
||||
|
||||
sed() {};
|
||||
};
|
||||
|
||||
// SED model composed of four powerlaw regions (ir, uv, xray, gamma)
// bridged by a spline fit through sampled points; see the constructor
// for how the regions are laid out from the input coordinates.
class sed_powerlaw_spline : public sed {
private:
	Spline<double,double> _output_model; // spline bridging the powerlaw regions
	powerlaw _ir_powerlaw;
	powerlaw _uv_powerlaw;
	powerlaw _xray_powerlaw;
	powerlaw _gamma_powerlaw;
	powerlaw_bounds _bounds; // energy span (eV) of each powerlaw region

	// These parameters might still be useful for rolling off various
	// quantities, but aren't used in the strict-spline case.

	// Derived values
	double _cutoff_uv_eV; // IRCut
	double _cutoff_xray_eV; // lowend_cutoff
	double _radius_in_cm;
	double _radius_in_cm_squared;
	double _scaling_factor;
	double _xray_coefficient;

public:
	// Evaluate the composite SED at photon energy hnu (eV).
	double eval(double hnu);
	// samples: pre-existing sample points (augmented in place);
	// powerlaw_coords: six points delimiting the four powerlaw regions.
	sed_powerlaw_spline(agn::sed_table& samples,
		agn::sed_table& powerlaw_coords);
	// Returns the powerlaw covering hnu, or NULL between regions.
	powerlaw * getpowerlaw(double hnu);
};
|
||||
|
||||
// Parametric powerlaw SED: a UV "big bump" component plus an X-ray
// powerlaw whose normalization is tied to the UV flux (see SED_at_2KeV).
class sed_pow_law : public sed {
public:
	// Total flux at photon energy hnu (eV): UV + X-ray components.
	double eval(double hnu);
	// Argument is photon energy in eV
	double eval_uv(double hnu);
	double eval_xray(double hnu);
	// Determined differently to be of use as the
	// xray coefficient.
	double SED_at_2KeV();


	// Continuum shape arguments
	double _T; //TCut
	double _alpha_ox;
	double _alpha_x;
	double _alpha_uv;
	double _cutoff_uv_rydberg;
	double _cutoff_xray_rydberg;
	double _log_radius_in_cm;

	// Derived values (computed once in the constructor)
	double _cutoff_uv_eV; // IRCut
	double _cutoff_xray_eV; // lowend_cutoff
	double _radius_in_cm;
	double _radius_in_cm_squared;
	double _scaling_factor;
	double _xray_coefficient;

	sed_pow_law (
		double T,
		double alpha_ox,
		double alpha_x,
		double alpha_uv,
		double cutoff_uv_rydberg,
		double cutoff_xray_rydberg,
		double log_radius_in_cm,
		double scaling_factor = 1.0

		// EL[e] model scaling factor
		// double scaling_factor = 1.39666E44
	);
};
|
||||
|
||||
// Returns coord in eV for given relative coord.
|
||||
double logspace_hnu_at(int i,int n);
|
||||
|
||||
// Takes an SED table as input and returns a string with format:
|
||||
// '<h*nu>\t<flux>\n' for each energy-flux pair
|
||||
std::string format_sed_table(sed_table table);
|
||||
|
||||
// Read continuum from file with '<h*nu>\t<flux>\n' formatting.
|
||||
// Will ignore up to 1 header.
|
||||
sed_table read_sed_table(std::ifstream& table_file);
|
||||
|
||||
// Does the same but converts hnu from rydberg to eV.
|
||||
sed_table read_and_convert_sed_table(std::ifstream& table_file);
|
||||
|
||||
// Cloudy takes the SED density as input. This function outputs
|
||||
// the corresponding SED table's SED density function in the form
|
||||
// of a cloudy input script "interpolate" command.
|
||||
std::string cloudy_interpolate_str(sed_table SED);
|
||||
|
||||
} // end namespace agn
|
||||
|
||||
|
||||
// Constructors
|
||||
|
||||
// Build a spline-bridged powerlaw SED. powerlaw_coords supplies six
// points, read in ascending-energy order, that delimit the ir, uv,
// xray and gamma powerlaw regions. Each region is sampled and the
// points are injected into `samples`; a spline through all samples
// then bridges the gaps between regions.
agn::sed_powerlaw_spline::sed_powerlaw_spline(
	agn::sed_table& samples,
	agn::sed_table& powerlaw_coords
)
{
	// coordinate vectors will be used to construct the spline sed model
	std::vector<double> x0;
	std::vector<double> x1;

	// Construct the four powerlaws from the six delimiting coordinates:
	// ir and gamma use fixed hard-coded slopes; uv and xray are fit
	// through their two endpoints.
	iterator1d table_it = powerlaw_coords.table.begin();
	coord2d ir_high_point = *table_it; table_it++;
	coord2d uv_low_point = *table_it; table_it++;
	coord2d uv_high_point = *table_it; table_it++;
	coord2d xray_low_point = *table_it; table_it++;
	coord2d xray_high_point = *table_it; table_it++;
	coord2d gamma_low_point = *table_it;
	_ir_powerlaw = powerlaw(ir_high_point,agn::IR_POWER);
	_uv_powerlaw = powerlaw(uv_low_point,uv_high_point);
	_xray_powerlaw = powerlaw(xray_low_point,xray_high_point);
	_gamma_powerlaw = powerlaw(gamma_low_point,agn::GAMMA_POWER);

	// Region boundaries; the outermost edges extend to the full
	// continuum domain Cloudy accepts.
	_bounds.ir_min = agn::CLOUDY_MIN_EV;
	_bounds.ir_max = ir_high_point.first;
	_bounds.uv_min = uv_low_point.first;
	_bounds.uv_max = uv_high_point.first;
	_bounds.xray_min = xray_low_point.first;
	_bounds.xray_max = xray_high_point.first;
	_bounds.gamma_min = gamma_low_point.first;
	_bounds.gamma_max = agn::CLOUDY_MAX_EV;

	if(agn::debug) {
		std::cout << "[Constructor] Powerlaw Boundaries: \n";
		std::cout << _bounds.ir_min << std::endl;
		std::cout << _bounds.ir_max << std::endl;
		std::cout << _bounds.uv_min << std::endl;
		std::cout << _bounds.uv_max << std::endl;
		std::cout << _bounds.xray_min << std::endl;
		std::cout << _bounds.xray_max << std::endl;
		std::cout << _bounds.gamma_min << std::endl;
		std::cout << _bounds.gamma_max << std::endl;
	}

	// Sample each powerlaw at segments+1 evenly spaced energies and
	// inject the points into the caller's sample table. The previous
	// revision spelled this out as four identical loops; they are now
	// driven from a small per-region table (same insertion order:
	// ir, uv, xray, gamma).
	const int segments=100;
	powerlaw* region_laws[4] =
		{&_ir_powerlaw,&_uv_powerlaw,&_xray_powerlaw,&_gamma_powerlaw};
	double region_min[4] =
		{_bounds.ir_min,_bounds.uv_min,_bounds.xray_min,_bounds.gamma_min};
	double region_max[4] =
		{_bounds.ir_max,_bounds.uv_max,_bounds.xray_max,_bounds.gamma_max};
	for (int r=0; r<4; r++) {
		for (int i=0; i<=segments; i++) {
			double hnu =
				region_min[r] +
				(i/(double)segments)*(region_max[r] - region_min[r]);
			double value = region_laws[r]->eval(hnu);
			coord2d point = coord2d(hnu,value);
			samples.table.insert(samples.table.end(),point);
		}
	}

	if(agn::debug) {
		std::cout
			<< "[Constructor] Samples after evaluating powerlaws: \n";
		std::cout << agn::format_sed_table(samples);
	}

	// load all samples into coordinate vectors
	table_it = samples.table.begin();
	while(table_it != samples.table.end()) {
		x0.push_back(table_it->first);
		x1.push_back(table_it->second);
		table_it++;
	}
	Spline<double,double> newspline(x0,x1);
	_output_model = newspline;
}
|
||||
|
||||
// Construct a powerlaw SED: stores the shape arguments verbatim and
// precomputes the derived eV cutoffs, radius terms, and the X-ray
// normalization coefficient.
agn::sed_pow_law::sed_pow_law (
	double T,
	double alpha_ox,
	double alpha_x,
	double alpha_uv,
	double cutoff_uv_rydberg,
	double cutoff_xray_rydberg,
	double log_radius_in_cm,
	double scaling_factor
):
	_T(T),
	_alpha_ox(alpha_ox),
	_alpha_x(alpha_x),
	_alpha_uv(alpha_uv),
	_cutoff_uv_rydberg(cutoff_uv_rydberg),
	_cutoff_xray_rydberg(cutoff_xray_rydberg),
	_log_radius_in_cm(log_radius_in_cm),
	_scaling_factor(scaling_factor)
{
	// Convert the cutoffs from Rydberg to eV once, up front.
	_cutoff_uv_eV = cutoff_uv_rydberg*RYDBERG_UNIT_EV;
	_cutoff_xray_eV = cutoff_xray_rydberg*RYDBERG_UNIT_EV;
	_radius_in_cm = pow(10,log_radius_in_cm);
	_radius_in_cm_squared = _radius_in_cm*_radius_in_cm;
	// Must run last: SED_at_2KeV() calls eval_uv(), which reads _T,
	// _alpha_uv, _cutoff_uv_eV and _scaling_factor set above.
	_xray_coefficient = agn::sed_pow_law::SED_at_2KeV();
}
|
||||
|
||||
|
||||
// Class Functions
|
||||
|
||||
// writes log-space histogram with n data
|
||||
agn::sed_table agn::sed::histogram_table(int n){
|
||||
agn::sed_table table;
|
||||
double max=0,min=1,hnu;
|
||||
for(int i=0; i<n; i++) {
|
||||
// evenly space coordinates in log space and save values
|
||||
hnu = logspace_hnu_at(i,n);
|
||||
table.table[hnu] = this->eval(hnu);
|
||||
// Just collects min and max
|
||||
if (table.table[hnu] > max) max = table.table[hnu];
|
||||
if (table.table[hnu] < min) min = table.table[hnu];
|
||||
}
|
||||
return table;
|
||||
}
|
||||
|
||||
// sed_powerlaw_spline evaluation: use the matching powerlaw when hnu
// (photon energy in eV) falls inside one of the four powerlaw regions,
// otherwise fall back to the spline model; the result is clamped to
// the continuum floor CONT_MIN_VAL.
double agn::sed_powerlaw_spline::eval(double hnu) {
	double magnitude=0.0;
	agn::powerlaw * here = this->getpowerlaw(hnu);
	if (here == NULL)
		// Between powerlaw regions: interpolate with the spline.
		magnitude += this->_output_model[hnu];
	else
		magnitude += here->eval(hnu);
	if (magnitude < agn::CONT_MIN_VAL) return agn::CONT_MIN_VAL;
	return magnitude;
}
|
||||
// Return the powerlaw whose [min,max] energy range contains hnu, or
// NULL when hnu lies between regions (handled by the spline instead).
// NOTE(review): the regions are assumed non-overlapping; the check
// order (gamma, uv, xray, ir) only matters if they overlap — confirm
// against how _bounds is populated in the constructor.
agn::powerlaw * agn::sed_powerlaw_spline::getpowerlaw(double hnu) {
	if (hnu <= _bounds.gamma_max && hnu >= _bounds.gamma_min )
		return &_gamma_powerlaw;
	if (hnu <= _bounds.uv_max && hnu >= _bounds.uv_min )
		return &_uv_powerlaw;
	if (hnu <= _bounds.xray_max && hnu >= _bounds.xray_min )
		return &_xray_powerlaw;
	if (hnu <= _bounds.ir_max && hnu >= _bounds.ir_min )
		return &_ir_powerlaw;
	return NULL;
}
|
||||
|
||||
// sed_pow_law evaluations

// Total SED at photon energy hnu (eV): UV bump plus X-ray powerlaw.
double agn::sed_pow_law::eval(double hnu) {
	double magnitude=0.0;
	magnitude += this->eval_uv(hnu);
	magnitude += this->eval_xray(hnu);

	return magnitude;
}
// UV "big bump" component: powerlaw of index 1+_alpha_uv with
// exponential rolloffs above the thermal energy kT and below the UV
// cutoff; clamped to the continuum floor CONT_MIN_VAL.
double agn::sed_pow_law::eval_uv(double hnu) {
	double bigbump_kT = _T
		* agn::BOLTZMANN_CONST;
	double magnitude = pow(hnu,(1+_alpha_uv))
		* exp(-(hnu)/bigbump_kT)
		* exp(-(_cutoff_uv_eV/hnu))
		* _scaling_factor;
	if (magnitude < agn::CONT_MIN_VAL) return agn::CONT_MIN_VAL;
	return magnitude;
}
// X-ray powerlaw component; hnu is in eV, so hnu/2000 normalizes the
// powerlaw to its value at 2 keV (see SED_at_2KeV / _xray_coefficient).
double agn::sed_pow_law::eval_xray(double hnu) {
	return _xray_coefficient
		* pow(hnu/2000,1+_alpha_x)
		* exp(-_cutoff_xray_eV/hnu)
		* _scaling_factor;
}
// X-ray normalization: take the unscaled UV flux at 2500 Angstrom
// (IN_EV_2500A eV) and extrapolate it to 2 keV (2000 eV) with slope
// _alpha_ox + 1.
double agn::sed_pow_law::SED_at_2KeV() {
	double ELe_at_2500A_no_scale = eval_uv(IN_EV_2500A)
		/ _scaling_factor;
	double energy_ratio = 2000/IN_EV_2500A;
	// Returns EL[e] at 2 KeV
	return ELe_at_2500A_no_scale
		* pow(energy_ratio,_alpha_ox + 1);
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
// Utilities
|
||||
|
||||
// Read a continuum from a file with '<h*nu>\t<flux>\n' rows. A single
// leading header line (anything not starting with a digit or '#') is
// preserved in the table's header field; '#' comment lines and empty
// lines are skipped.
agn::sed_table agn::read_sed_table(std::ifstream& table_file) {
	sed_table resultant;
	std::string line;
	double hnu;
	if(!isdigit(table_file.peek()) && table_file.peek() != '#') {
		std::getline(table_file,resultant.header);
	}
	// Drive the loop off getline itself. The previous revision tested
	// eof() before reading, which processed one bogus trailing
	// iteration when the file ended in a newline; it also indexed
	// line[0] without checking for an empty line.
	while(std::getline(table_file,line)) {
		if (line.empty() || line[0] == '#') continue;
		std::stringstream scratch(line);
		scratch >> hnu;
		scratch >> resultant.table[hnu];
	}
	return resultant;
}
|
||||
|
||||
|
||||
// Same as read_sed_table, but input energies are in Rydberg and are
// converted to eV on the way in.
agn::sed_table agn::read_and_convert_sed_table(std::ifstream& table_file) {
	sed_table resultant;
	std::string scratch;
	double hnu_in_ryd,hnu_in_ev;
	std::getline(table_file,scratch);
	if(scratch.empty() || !isdigit(scratch[0])) {
		// First line is a header; keep it verbatim.
		resultant.header = scratch;
	} else {
		// BUG FIX: the previous revision consumed the first line even
		// when it was data, silently dropping the first row. Parse it.
		std::stringstream first_row(scratch);
		first_row >> hnu_in_ryd;
		hnu_in_ev = hnu_in_ryd*agn::RYDBERG_UNIT_EV;
		first_row >> resultant.table[hnu_in_ev];
	}
	// Extraction-driven loop: stops cleanly at eof or a malformed row
	// (the old !eof() test ran one extra garbage iteration).
	while(table_file >> hnu_in_ryd) {
		hnu_in_ev = hnu_in_ryd*agn::RYDBERG_UNIT_EV;
		table_file >> resultant.table[hnu_in_ev];
		getline(table_file,scratch); // discard the remainder of the row
	}
	// BUG FIX: the previous revision fell off the end of this non-void
	// function without returning — undefined behavior.
	return resultant;
}
|
||||
|
||||
// Render an SED table as '<h*nu>\t<flux>\n' rows in scientific
// notation (5 significant digits), preceded by the stored header —
// if any — exactly as it was read (no newline is appended to it).
std::string agn::format_sed_table(agn::sed_table table) {
	std::stringstream out;
	if (!table.header.empty()) out << table.header;
	out << std::setprecision(5) << std::scientific;
	for (agn::table1d::iterator row = table.table.begin();
	     row != table.table.end(); ++row) {
		out << row->first << "\t" << row->second << "\n";
	}
	return out.str();
}
|
||||
|
||||
// Emit the SED density as a Cloudy "interpolate" command: a lead-in
// point a decade below the table, the tabulated points (five per
// "continue" line), and a trail-off rising to Cloudy's maximum energy.
// Energies are in Rydberg; densities are log10(flux / energy).
std::string agn::cloudy_interpolate_str(agn::sed_table table) {
	std::stringstream output;
	agn::table1d::iterator table_iterator = table.table.begin();
	// Lead in to uv bump at slope=2 in log(energy [rydberg]) space
	double energy_in_rydbergs = table_iterator->first
		/ agn::RYDBERG_UNIT_EV;
	double log_uv_bump_start = log10( energy_in_rydbergs );
	double log_lowest_value = log10(table_iterator->second
		/ table_iterator->first);
	double log_min_energy = log10(agn::CLOUDY_EMM)
		- 1;
	double log_SED_density = log_lowest_value
		- 2*(log_uv_bump_start
		- log_min_energy);
	// NOTE(review): this compares a log-space quantity against the
	// linear floor 1e-36 — a floor of -36 was possibly intended; confirm.
	if ( log_SED_density < 1e-36 ) log_SED_density = 1e-36;
	output
		<< "interpolate ("
		<< pow(10,log_min_energy)
		<< " "
		<< log_SED_density
		<< ")";
	int count=0;

	while(table_iterator != table.table.end()) {
		energy_in_rydbergs = table_iterator->first
			/ agn::RYDBERG_UNIT_EV;
		// NOTE(review): this declaration shadows the outer
		// log_SED_density, so the trail-off loop below continues from
		// the lead-in value rather than the last tabulated one —
		// confirm that is intended.
		double log_SED_density = log10( table_iterator->second
			/ table_iterator->first);
		// Cloudy limits points per line: start a "continue" line every
		// five points.
		if ((count%5)==0) output << "\n" << "continue ";
		else output << " ";
		output
			<< "("
			<< energy_in_rydbergs
			<< " "
			<< log_SED_density
			<< ")";
		count++;
		table_iterator++;
	}
	// Trail off at slope=-2 in log(energy [rydberg]) space
	while ( energy_in_rydbergs < agn::CLOUDY_EGAMRY ) {
		double log_energy = log10(energy_in_rydbergs);
		energy_in_rydbergs = pow(10,log_energy+1);
		log_SED_density -= 2;
		output
			<< "("
			<< energy_in_rydbergs
			<< " "
			<< log_SED_density
			<< ")";
	}
	return output.str();
}
|
||||
|
||||
|
||||
double agn::logspace_hnu_at(int i,int n) {
|
||||
double relative_coord_logspace=(double)(i)/n;
|
||||
double abs_coord_logspace = relative_coord_logspace*CONT_WIDTH_LOGX + CONT_MIN_LOGX;
|
||||
return pow(10,abs_coord_logspace);
|
||||
}
|
||||
|
||||
|
||||
#endif
|
BIN
htdocs/examples/shelf.jpg
Normal file
BIN
htdocs/examples/shelf.jpg
Normal file
Binary file not shown.
After Width: | Height: | Size: 136 KiB |
495
htdocs/examples/spectral_lines.hpp
Normal file
495
htdocs/examples/spectral_lines.hpp
Normal file
@ -0,0 +1,495 @@
|
||||
#ifndef spectral_lines_hpp
#define spectral_lines_hpp

#include "agn.hpp"
//#include "png++-0.2.5/gray_pixel.hpp"
//#include "png++-0.2.5/image.hpp"


// Declarations for reading, smoothing, and formatting Cloudy
// equivalent-width tables keyed by emission-line label.
namespace agn {

// Floor applied to equivalent widths (and its log10) so that
// log-scale plots and ratios stay finite.
const double EQWIDTH_MIN_VAL = 1E-7;
const double EQWIDTH_MIN_VAL_LOG = -7;
// Multipliers used by the outlier search (see find_outliers).
const double SIGMA_THRESHOLD_MULTIPLIER = 1.0;
const double RATIO_THRESHOLD_MULTIPLIER = .06;

// Ordered collection of Cloudy line labels to capture.
typedef std::list<std::string> line_list;
// Parses a line-list file into a line_list (see definition below).
line_list read_line_list(std::ifstream&);

// Trims whitespace from line labels
std::string label_trim(std::string);

// Emission line flux contours are represented by a singly-nested sorted map.
struct eqwidth_table {
	std::string header[2]; // the two header lines of the fort-file format
	table2d value;         // value[x][y] grid; table2d is declared in agn.hpp
};
// True when every entry sits at the EQWIDTH_MIN_VAL floor.
bool is_zero(const eqwidth_table&);
// Parses a fort-file table (two headers + rows of x y value).
eqwidth_table read_eqwidth_table(std::ifstream& table_file);
// Renders a table / one constant-x slice of a table as text.
std::string format_eqwidth_table(eqwidth_table table);
std::string format_eqwidth_table_slice(eqwidth_table table,iterator2d x);
// Outlier detection and smoothing over the grid
// (gridcoordlist comes from agn.hpp).
gridcoordlist find_outliers(eqwidth_table table);
gridcoordlist known_outliers();
eqwidth_table smooth(eqwidth_table&, gridcoordlist coordlist);
// Builds one table per label from a Cloudy grid; the trailing double is
// a scale factor applied to every equivalent width (default 1.0).
std::list<eqwidth_table> compile_eqwidth_tables(cloudy_grid,line_list,double = 1.0);
// Operator<< prints table formatted for a "fort file".
std::ostream& operator<< (std::ostream&,eqwidth_table);

} // end namespace agn
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
// Definitions
|
||||
|
||||
bool agn::is_zero(const eqwidth_table& table_to_check) {
|
||||
agn::table2d::const_iterator x_it = table_to_check.value.begin();
|
||||
while (x_it != table_to_check.value.end()) {
|
||||
agn::table1d::const_iterator y_it= x_it->second.begin();
|
||||
while (y_it != x_it->second.end()) {
|
||||
if (y_it->second != EQWIDTH_MIN_VAL)
|
||||
return false;
|
||||
y_it++;
|
||||
}
|
||||
x_it++;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
// Parses a "fort file" style equivalent-width table: two header lines
// followed by rows of <hden> <phi> <flux>.  The stream is rewound before
// parsing, so it may arrive in any position.  A file with fewer than two
// lines yields a table with whatever headers were present and no data;
// a malformed data row terminates the read early rather than storing
// garbage values.
agn::eqwidth_table agn::read_eqwidth_table(std::ifstream& table_file) {
	agn::eqwidth_table resultant;
	double hden, phi, flux;
	// First pass: count lines to learn how many data rows follow the headers.
	int number_of_lines = 0;
	std::string null;
	while (std::getline(table_file, null))
		++number_of_lines;
	int table_entries = number_of_lines - 2;
	if (table_entries < 0)
		table_entries = 0; // guard: file shorter than the two header lines
	// Rewind for the actual parse (clear() drops the EOF flag first).
	table_file.clear();
	table_file.seekg(0);
	getline(table_file,resultant.header[0]);
	getline(table_file,resultant.header[1]);
	for (int i=0; i<table_entries; i++) {
		if (!(table_file >> hden >> phi >> flux))
			break; // extraction failed: stop instead of storing stale values
		resultant.value[hden][phi]=flux;
	}
	return resultant;
}
|
||||
|
||||
// Renders a table in fort-file layout: the two header lines, then one
// "<x> <y> <value>" row per grid point — coordinates in fixed-point
// (3 decimals), values in scientific notation (4 decimals).
std::string agn::format_eqwidth_table(agn::eqwidth_table table) {
	std::stringstream output;
	output << table.header[0] << "\n"
	       << table.header[1] << "\n";
	for (agn::iterator2d row = table.value.begin();
	     row != table.value.end(); ++row) {
		for (agn::iterator1d col = row->second.begin();
		     col != row->second.end(); ++col) {
			output << std::fixed << std::setprecision(3)
			       << row->first << " " << col->first << " "
			       << std::scientific << std::setprecision(4)
			       << col->second << "\n";
		}
	}
	return output.str();
}
|
||||
|
||||
// Renders one constant-x slice of a table as two tab-separated columns:
// the y coordinate (fixed-point, 3 decimals) and the stored value
// (scientific, 4 decimals).  No header is emitted.
std::string agn::format_eqwidth_table_slice(agn::eqwidth_table table,agn::iterator2d fixed) {
	std::stringstream output("");
	for (iterator1d cell = fixed->second.begin();
	     cell != fixed->second.end(); ++cell) {
		output << std::fixed << std::setprecision(3)
		       << cell->first << "\t"
		       << std::scientific << std::setprecision(4)
		       << cell->second << "\n";
	}
	return output.str();
}
|
||||
|
||||
|
||||
std::ostream& agn::operator<< ( std::ostream& outstream,
|
||||
agn::eqwidth_table table) {
|
||||
std::string header0_trimmed = agn::label_trim(table.header[0]);
|
||||
outstream
|
||||
<< header0_trimmed
|
||||
<< std::endl
|
||||
<< table.header[1]
|
||||
<< std::endl;
|
||||
agn::table2d::iterator x_it = table.value.begin();
|
||||
while (x_it != table.value.end()) {
|
||||
agn::table1d::iterator y_it= x_it->second.begin();
|
||||
while (y_it != x_it->second.end()) {
|
||||
outstream.width(6);
|
||||
outstream
|
||||
<< std::fixed
|
||||
<< std::setprecision(3)
|
||||
<< x_it->first
|
||||
<< "\t"
|
||||
<< y_it->first
|
||||
<< "\t"
|
||||
<< std::scientific
|
||||
<< std::setprecision(4)
|
||||
<< y_it->second
|
||||
<< "\n";
|
||||
y_it++;
|
||||
}
|
||||
x_it++;
|
||||
}
|
||||
return outstream;
|
||||
}
|
||||
|
||||
|
||||
// Returns a table as the difference between two tables
|
||||
//agn::eqwidth_table main(int argc, char const *argv[])
|
||||
//{ // Copied from fortfile subtractor
|
||||
// std::ifstream table1;
|
||||
// table1.open(argv[1]);
|
||||
// std::ifstream table2;
|
||||
// table2.open(argv[2]);
|
||||
//
|
||||
// std::string line;
|
||||
//
|
||||
// int table1_numlines = 0;
|
||||
// while (std::getline(table1, line))
|
||||
// ++table1_numlines;
|
||||
// table1.clear();
|
||||
// table1.seekg(0);
|
||||
//
|
||||
// int table_entries = table1_numlines - 2;
|
||||
//
|
||||
//// int table2_numlines = 0;
|
||||
//// while (std::getline(table1, line))
|
||||
//// ++table2_numlines;
|
||||
////
|
||||
//// int table2_entries = table2_numlines - 2;
|
||||
//// table2.clear();
|
||||
//// table2.seekg(0);
|
||||
//
|
||||
// double hden1[table_entries];
|
||||
// double phi1[table_entries];
|
||||
// double magnitude1[table_entries];
|
||||
// double resultant_magnitude[table_entries];
|
||||
//
|
||||
// std::string first_line, second_line, scratch;
|
||||
// getline(table1,first_line);
|
||||
// getline(table1,second_line);
|
||||
// getline(table2,scratch);
|
||||
// getline(table2,scratch);
|
||||
//
|
||||
// first_line[0] = argv[3][0];
|
||||
// first_line[1] = argv[3][1];
|
||||
// first_line[2] = argv[3][2];
|
||||
// first_line[3] = argv[3][3];
|
||||
//
|
||||
// for (int i=0; i<table_entries; i++) {
|
||||
// table1 >> hden1[i] >> phi1[i] >> magnitude1 [i];
|
||||
// }
|
||||
//
|
||||
//
|
||||
// int i=0;
|
||||
// double hden2, phi2, magnitude2;
|
||||
// for (int i=0; i<table_entries; i++) {
|
||||
// if ( table2.eof() ) {
|
||||
// resultant_magnitude[i]=magnitude1[i];
|
||||
// }
|
||||
// else {
|
||||
// table2 >> hden2 >> phi2 >> magnitude2;
|
||||
// resultant_magnitude[i] = magnitude1[i] - magnitude2;
|
||||
// }
|
||||
// if ( resultant_magnitude[i] < 1e-10 )
|
||||
// resultant_magnitude[i]=1e-10;
|
||||
// }
|
||||
//
|
||||
//
|
||||
//
|
||||
// std::cout
|
||||
// << first_line
|
||||
// << std::endl
|
||||
// //<< "HDen Phi(H) ScaInten"
|
||||
// << second_line
|
||||
// << std::endl;
|
||||
//
|
||||
// for (int i=0; i<table_entries; i++) {
|
||||
// std::cout
|
||||
// << std::fixed
|
||||
// << std::setprecision(3)
|
||||
// << hden1[i]
|
||||
// << " "
|
||||
// << phi1[i]
|
||||
// << " "
|
||||
// << std::scientific
|
||||
// << std::setprecision(4)
|
||||
// << resultant_magnitude[i]
|
||||
// << std::endl;
|
||||
// }
|
||||
//
|
||||
// table1.close();
|
||||
// table2.close();
|
||||
// return 0;
|
||||
//}
|
||||
|
||||
|
||||
// Builds the list of emission-line labels to capture from a line-list
// file.  Comment ('#') lines, lines shorter than two characters, and
// doubly-indented lines are skipped; a single leading space is stripped.
// "nFnu"-prefixed entries are expanded into matching nFnu / InwT / InwC
// labels, which are appended — in that order — after all ordinary labels.
// The stream is rewound before reading.
agn::line_list agn::read_line_list(std::ifstream& inputfile) {
	line_list list;
	line_list nfnulist;
	line_list inwtlist;
	line_list inwclist;
	inputfile.clear();
	inputfile.seekg(0);
	std::string line,label;
	// Loop on getline itself: the original while(!eof()) idiom processed
	// one phantom iteration after the final read failed.
	while(getline(inputfile,line)) {
		// Size check first, so line[0]/line[1] are only read when present.
		if (line.size() < 2 || line[0] == '#') continue;
		if (line[0] == ' ' && line[1] == ' ') continue; // doubly indented: skip
		if (line[0] == ' ')
			label = line.substr(1,agn::LABELSTR_LEN);
		else
			label = line.substr(0,agn::LABELSTR_LEN);
		// Field width above matches the c17 output layout (see agn.hpp).
		if(agn::debug) std::cout
			<< "Adding label "
			<< label
			<< " to capture list.\n";
		// Label text after the 4-character species prefix.  Guarded:
		// substr(4) on a shorter label would throw std::out_of_range.
		std::string applabel = label.size() >= 4 ? label.substr(4) : std::string();
		if (label.substr(0,4).compare(std::string("nFnu")) == 0) {
			nfnulist.push_back(std::string("nFnu").append(applabel));
			inwtlist.push_back(std::string("InwT").append(applabel));
			inwclist.push_back(std::string("InwC").append(applabel));
			continue;
		}
		list.push_back(label);
	}
	// Continuum-style labels follow all ordinary line labels.
	list.insert(list.end(),nfnulist.begin(),nfnulist.end());
	list.insert(list.end(),inwtlist.begin(),inwtlist.end());
	list.insert(list.end(),inwclist.begin(),inwclist.end());
	return list;
}
|
||||
|
||||
|
||||
// Builds one equivalent-width table per requested label from a Cloudy
// grid, dividing each equivalent width by scale_factor.  For ordinary
// emission lines a matching "Inwd" (inward-emission) table is appended
// immediately after the line's own table; incident (Inci), FeKa,
// continuum (nFn*/Inw*) labels and a fixed set of blends known to lack
// inward data are excluded from that step.  Tables are returned in
// capture order.
std::list<agn::eqwidth_table> agn::compile_eqwidth_tables(agn::cloudy_grid grid,agn::line_list line_labels,double scale_factor) {
	// NOTE(review): table_list_radiated_energy is declared but never used below.
	std::list<eqwidth_table> table_list_eq_width,table_list_radiated_energy;
	agn::line_list::iterator line_label_it = line_labels.begin();
	// Iterate over labels, creating table for each:
	while (line_label_it != line_labels.end()) {
		double x,y; // grid coordinates (presumably hden, phi — per header[1])
		agn::cloudy_line_data data;
		agn::eqwidth_table new_table;
		std::string label = *line_label_it;
		if(agn::debug) std::cout
			<< "Processing label "
			<< label
			<< std::endl;
		// Header line 0 is just the label itself.
		std::stringstream header0;
		header0
			<< label;
		new_table.header[0] = header0.str();
		new_table.header[1] = "Hden Phi(H) F(line)/Inci(1215) ";
		// Fill the outward table with one value per grid point.
		agn::cloudy_grid::iterator result_it = grid.begin();
		while (result_it != grid.end()) {
			x = result_it->first.first;
			y = result_it->first.second;
			// if no data exist under this label, initialize some:
			if ( result_it->second.intrinsic_line_intensity.count(label) == 0 ) {
				data.radiated_energy=-35.0; // sentinel well below any real log value
				data.eq_width=0.0;
			}
			else {
				data = result_it->second.intrinsic_line_intensity[label];
				data.eq_width /= scale_factor;
				if(agn::line_debug) std::cout
					<< "Added "
					<< std::setprecision(2)
					<< std::fixed
					<< x
					<< ", "
					<< y
					<< " -- "
					<< std::scientific
					<< data.radiated_energy
					<< ", "
					<< data.eq_width
					<< "\n";
				if ( data.has_duplicates ) {
					// This needs to be expanded to catch all duplicate values j=1,2,etc.
					// For now, just blindly use the value the program finds...
					// the functionality is written in the agn.hpp sections, but is not
					// implemented here, and as of 2017, the agn.hpp function writes the
					// last-encountered instance of the emission line value to the label
					// probably needs to be fixed for the inwd part too
				}
			}
			// Clamp unmeasurably small values to the table floor so log plots
			// stay finite.
			if ( data.radiated_energy < EQWIDTH_MIN_VAL_LOG || data.eq_width < EQWIDTH_MIN_VAL) {
				data.radiated_energy = EQWIDTH_MIN_VAL_LOG;
				data.eq_width = EQWIDTH_MIN_VAL;
			}
			new_table.value[x][y] = data.eq_width;
			result_it++;
		}
		table_list_eq_width.push_back(new_table);
		// Add inward table for emission lines, and only works for c17
		// NOTE(review): substr(LABELSTR_LEN - 14) presumably isolates the
		// wavelength field of the c17 label — confirm against the layout
		// constants in agn.hpp.
		if (label.substr(agn::LABELSTR_POS,4) != "Inci" &&
			label.substr(agn::LABELSTR_POS,4) != "FeKa" &&
			//label.substr(agn::LABELSTR_POS,4) != "Blnd" &&
			label.substr(agn::LABELSTR_POS,3) != "nFn" &&
			label.substr(agn::LABELSTR_POS,3) != "Inw" &&
			label.substr(LABELSTR_LEN - 14) != " 0 ") {
			// These lines are known to not have inward fractions, so
			// should be skipped.
			if (label == "Blnd 1657.00A" ||
				label == "Blnd 2326.00A" ||
				label == "Blnd 2798.00A" ||
				label == "Blnd 1335.00A" ||
				label == "Blnd 977.000A" ||
				label == "Blnd 1909.00A" ||
				label == "Blnd 1176.00A" ||
				label == "Blnd 2141.00A" ||
				label == "Blnd 1085.00A" ||
				label == "Blnd 1750.00A" ||
				label == "Blnd 990.000A" ||
				label == "Blnd 1486.00A" ||
				label == "Blnd 765.000A" ||
				label == "Blnd 1304.00A" ||
				label == "Blnd 8446.00A" ||
				label == "Blnd 1666.00A" ||
				label == "Blnd 835.000A" ||
				label == "Blnd 1402.00A" ||
				label == "Blnd 630.000A" ||
				label == "Blnd 1218.00A" ||
				label == "Blnd 774.000A" ||
				label == "Blnd 5892.00A" ||
				label == "Blnd 615.000A" ||
				label == "Blnd 2665.00A" ||
				label == "Blnd 1860.00A" ||
				label == "Blnd 2335.00A" ||
				label == "Blnd 1888.00A" ||
				label == "Blnd 1397.00A" ||
				label == "Blnd 1256.00A" ||
				label == "Blnd 1720.00A" ||
				label == "Blnd 1198.00A" ||
				label == "Blnd 1406.00A" ||
				label == "Blnd 1199.00A" ||
				label == "Blnd 8579.00A" ||
				label == "Blnd 3933.00A"
				) {
				line_label_it++;
				continue;
			}

			if(agn::debug) std::cout
				<< "Processing Inwd for label "
				<< label
				<< std::endl;
			std::stringstream inwdheader0;
			agn::eqwidth_table inwd_table;
			// Inward label: "Inwd " + everything after the 5-char prefix.
			inwdheader0
				<< "Inwd "
				<< label.substr(5);
			inwd_table.header[0] = inwdheader0.str();
			inwd_table.header[1] = "Hden Phi(H) F(line)/Inci(1215)";
			// Second pass over the grid for the inward intensities; same
			// shape as the outward loop above.
			result_it = grid.begin();
			agn::cloudy_line_data inwddata;
			while (result_it != grid.end()) {
				x = result_it->first.first;
				y = result_it->first.second;
				// if no data exist under this label, initialize some:
				if ( result_it->second.intrinsic_line_inward_intensity.count(label) == 0 ) {
					inwddata.radiated_energy=-35.0;
					inwddata.eq_width=0.0;
				}
				else {
					inwddata = result_it->second.intrinsic_line_inward_intensity[label];
					inwddata.eq_width /= scale_factor;
				}
				if(agn::line_debug) std::cout
					<< "Added "
					<< std::setprecision(2)
					<< std::fixed
					<< x
					<< ", "
					<< y
					<< " -- "
					<< std::scientific
					<< inwddata.radiated_energy
					<< ", "
					<< inwddata.eq_width
					<< "\n";
				if ( inwddata.has_duplicates ) {
					// This needs to be expanded to catch all duplicate values j=1,2,etc.
					// For now, just blindly use the value the program finds...
					// the functionality is written in the agn.hpp sections, but is not
					// implemented here, and as of 2017, the agn.hpp function writes the
					// last-encountered instance of the emission line value to the label
					// probably needs to be fixed for the inwd part too
				}
				// Same floor clamp as the outward table.
				if ( inwddata.radiated_energy < EQWIDTH_MIN_VAL_LOG ||
					inwddata.eq_width < EQWIDTH_MIN_VAL) {
					inwddata.radiated_energy = EQWIDTH_MIN_VAL_LOG;
					inwddata.eq_width = EQWIDTH_MIN_VAL;
				}
				inwd_table.value[x][y] = inwddata.eq_width;
				result_it++;
			}
			table_list_eq_width.push_back(inwd_table);
		}
		line_label_it++;
	}
	return table_list_eq_width;
}
|
||||
|
||||
|
||||
#endif
|
97
htdocs/examples/stage1.sh
Executable file
97
htdocs/examples/stage1.sh
Executable file
@ -0,0 +1,97 @@
|
||||
#!/bin/bash

# Stage 1 of the bicrystal unit population program.
#
# This program will find all files ending in .POSCAR in any subdirectories
# of [source dir] (or pwd if [source dir] is not specified), convert them
# to jmol cartesian snapshots, then populate a new directory tree under
# <root dir> with these snapshots.

# The program will then convert each jmol snapshot to a LAMMPS input file
# and shrink the simulation box around the sample. The program will also
# populate an info file for each sample, and store at this time the grain
# boundary mismatch angle in that file.

# Usage: ./stage1.sh <root dir> [source dir]


# Resolve the scripts/ directory from this script's own location.
# NOTE(review): unquoted expansions throughout this script break on
# paths containing whitespace — presumably acceptable for this dataset.
script_dir=$( cd $(dirname $0) ; pwd -P |sed 's@^\(.*\)/scripts.*@\1/scripts@')
bin_dir="$script_dir/../bin"
root_dir=$1
if [[ -z $2 ]]; then source_dir=$(pwd); else source_dir=$2; fi

# Strip any trailing slashes from the two directory arguments.
# NOTE(review): the bracket class "[a-zA-Z0-9_-.]" places '-' between
# '_' and '.', which some sed builds parse as an (invalid) range —
# "[a-zA-Z0-9_.-]" would be safer; verify on the target platform.
source_dir=$(echo "$source_dir"|sed 's@\([a-zA-Z0-9_-.]\)/*$@\1@')
root_dir=$(echo "$root_dir"|sed 's@\([a-zA-Z0-9_-.]\)/*$@\1@')

# Shared helpers (provides `clean`, used below).
source $script_dir/func/utility.src


# Locate all *.POSCAR under $1
POSCAR_files=$(find $source_dir -name '*.POSCAR')

#echo $POSCAR_files
num_files=$(echo $POSCAR_files|wc -w)
echo $num_files POSCAR files found.

echo Processing files. Output to $root_dir.

count=0
echo -n "0% complete."
for POSCAR_file in $POSCAR_files; do

	# gather info
	file_name=$(basename $POSCAR_file)
	# NOTE(review): 's/.POSCAR//' has an unescaped dot, so it deletes the
	# first occurrence of any char + "POSCAR" — harmless for these names.
	sample_name=$(echo $file_name|sed 's/.POSCAR//')
	sample_dir=$(dirname $POSCAR_file)

	# create new directory
	new_dir=$(echo $sample_dir|sed "s@^$source_dir@$root_dir/@"|sed 's/.POSCAR//'|sed 's@/$@@')
	mkdir -p $new_dir

	# ylconv VASP:xyz, move to new dir
	xyz_initial="${new_dir}/${sample_name}_initial.xyz"
	$bin_dir/ylconv -svasp:xyz $POSCAR_file > $xyz_initial 2> /dev/null

#	echo $file_name
#	echo $sample_name
#	echo $sample_dir
#	echo $new_dir
#	echo $xyz_initial

	# convert to LAMMPS input
	#echo $new_dir
	#echo $sample_name
	LAMMPS_input="${new_dir}/${sample_name}.input"
	#echo $LAMMPS_input
	#echo "$script_dir/conv/xyz_to_lammps_input.sh $xyz_initial > $LAMMPS_input.tmp"
	$script_dir/conv/xyz_to_lammps_input.sh $xyz_initial > $LAMMPS_input.tmp

	# shrink box
	$script_dir/manip/shrink_box.sh $LAMMPS_input.tmp > $LAMMPS_input

	# create info file, write basic information
	info_file=${new_dir}/info
	echo "$sample_dir/$sample_name" > $info_file
	# Underline the title with '=' characters of matching length.
	for iteration in $(seq $(echo "$sample_dir/$sample_name"|wc -c)); do echo -n "="; done >> $info_file
	echo -ne "\n" >> $info_file

	# parse grain mismatch angle and save to info file
	#mismatch_angle=$(head -n1 $POSCAR_file|sed 's/^.*angle\=\s*\([0-9.]\)\s*$//')
	# NOTE(review): the capture group \([0-9]\) grabs a SINGLE digit, so
	# multi-digit or fractional angles are truncated — compare with the
	# commented-out attempt above; likely wants \([0-9.]*\).
	mismatch_angle=$(head -n1 $POSCAR_file|sed 's/^.*\=\s*\([0-9]\)[^0-9]*/\1/')
	echo "$mismatch_angle" >> $info_file

	# Remove temporaries (from utility.src).
	clean $new_dir > /dev/null

	(( count++ ))

	# Progress meter: bc computes count/num to 2 decimal places; removing
	# the dot turns "0.NN" into a whole-number percentage.
	ratio=$(echo "
	scale=2
	count = $count
	num = $num_files
	ratio = count / num
	ratio
	" | bc)
	percent=$(echo $ratio|sed 's/\.//')
	echo -ne "\r${percent}% complete."

done
38
htdocs/examples/stage2.sh
Executable file
38
htdocs/examples/stage2.sh
Executable file
@ -0,0 +1,38 @@
|
||||
#!/bin/bash

# Stage 2 of the bicrystal unit population program will run
# relax_sample_and_box.sh for any LAMMPS input files found under <root dir>,
# thus creating a unit cell for PBC from each sample.

# Usage: ./stage2.sh <root dir>

# Resolve the scripts/ directory from this script's own location.
script_dir=$( cd $(dirname $0) ; pwd -P |sed 's@^\(.*/scripts\).*@\1@')
bin_dir=$script_dir/../bin
root_dir=$1

# find all input files
input_files=$(find $root_dir -name '*.input')
num_files=$(echo $input_files|wc -w)
echo Found $num_files input files.

count=0
echo -n "0% complete."
# relax sample/box
for input_file in $(echo $input_files); do
	$script_dir/run/relax_sample_and_box.sh $input_file

	(( count++ ))

	# Progress meter: bc computes count/num to 2 decimal places;
	# stripping the dot turns "0.NN" into a whole-number percentage.
	ratio=$(echo "
	scale=2
	count = $count
	num = $num_files
	ratio = count / num
	ratio
	" | bc)
	percent=$(echo $ratio|sed 's/\.//')
	echo -ne "\r${percent}% complete."

done

echo -en "\n"
154
htdocs/examples/stage3.sh
Executable file
154
htdocs/examples/stage3.sh
Executable file
@ -0,0 +1,154 @@
|
||||
#!/bin/bash

# Usage: ./stage3.sh <root dir>

# Stage 3 of the bicrystal unit population program runs
# post-processing analyses on completed LAMMPS runs located
# under <root dir>.
#
#
# Post-processing procedure:
#
# - Create initial and final state xyz files from .dump files.
# - Create ecfg with atomic coordinates, energy, and pressure
# - Create xmgrace-readable tables for each structure:
#   * atomic energy as function of x displacement
#   * atomic pressure as function of x displacement
#   * atomic stress xy component as function of x displacement
# - Append info files with:
#     <num atoms> atoms
#     <formation energy> - <formation energy of pristine graphene>
#     <initial box dimensions>
#     <final box dimensions>
#     Coordinations
#     -------------
#     4: # atoms
#     3: # atoms
#     2: # atoms
#     1: # atoms
#
#
# - Copy contents of all info files to file named "report" under
#   <root dir>.




# Resolve the scripts/ directory from this script's own location.
script_dir=$( cd $(dirname $0) ; pwd -P |sed 's@^\(.*/scripts\).*@\1@')
bin_dir=$script_dir/../bin
root_dir=$1

# Parsing helpers used below (parse_final_energy, parse_num_atoms_from_dump,
# parse_box_dimensions_from_dump, parse_coordinations, clean).
source $script_dir/func/log_parsing.src
source $script_dir/func/dump_parsing.src
source $script_dir/func/utility.src

pristine_form_energy="-7.956" # eV/atom, value obtained from SEDREBO
                              # relaxation of pristine structure.


# Find all info files, used as indicators of completed LAMMPS run.
info_files=$(find $root_dir -name 'info')
num_info_files=$(echo $info_files|wc -w)

# Find and write formation energy difference and coordination histogram.
echo -n "Analysing data, writing xyz files, creating tables, and "
echo -ne "appending info files for $num_info_files structures.\n"
sample_count=1
count=0
echo -n "0% complete."

for info_file in $info_files; do
	# Locate files.  Numeric sort puts the first dump first; reverse
	# numeric sort puts the last dump first.
	proc_dir=$(dirname $info_file)
	initial_dump_file=$(find $proc_dir -name '*.dump'|sort -n|sed -n 1p)
	final_dump_file=$(find $proc_dir -name '*.dump'|sort -n -r|sed -n 1p)

	# Create initial and final state xyz files.
	$script_dir/conv/lammps_dump_to_xyz.sh $initial_dump_file $proc_dir/initial.xyz >> /dev/null
	$script_dir/conv/lammps_dump_to_xyz.sh $final_dump_file $proc_dir/final.xyz >> /dev/null

	#echo yo!

	# Create ecfg file with atomic energies and pressures.
	$script_dir/conv/lammps_dump_to_xyz_with_energy_and_pressure.sh $final_dump_file $proc_dir/final_wEP.xyz.tmp >> /dev/null
	$bin_dir/ylconv -sxyz:ecfg $proc_dir/final_wEP.xyz.tmp > $proc_dir/final.ecfg >> /dev/null 2> /dev/null

	# Create "minimized.input" for each final dump.
	$script_dir/conv/lammps_dump_to_input.sh $final_dump_file $proc_dir/minimized.input >> /dev/null

	# Create xmgrace-readable tables.
	energy_table_file="$proc_dir/energy_afo_x.xm"
	pressure_table_file="$proc_dir/pressure_afo_x.xm"
	stressxy_table_file="$proc_dir/stressxy_afo_x.xm"
	$script_dir/analysis/graph_energy_afo_x_displacement.sh $final_dump_file $energy_table_file >> /dev/null
	$script_dir/analysis/graph_pressure_afo_x_displacement.sh $final_dump_file $pressure_table_file >> /dev/null
	$script_dir/analysis/graph_stressxy_afo_x_displacement.sh $final_dump_file $stressxy_table_file >> /dev/null

	#echo here!

	# Gather info.
	energy=$(parse_final_energy $proc_dir/log.lmp)
	num_atoms=$(parse_num_atoms_from_dump $final_dump_file)
	form_energy=$(bc <<< "scale=10; $energy / $num_atoms")
	form_energy_diff=$(bc <<< "scale=10; $form_energy - $pristine_form_energy")
	box_xyz_initial=$(parse_box_dimensions_from_dump $initial_dump_file)
	box_xyz_final=$(parse_box_dimensions_from_dump $final_dump_file)

	#echo hello?

	# Write to info file.
	echo "$num_atoms atoms" >> $info_file
	echo $form_energy_diff >> $info_file
	echo $box_xyz_initial >> $info_file
	echo $box_xyz_final >> $info_file
	echo -e "Coordinations\n-------------" >> $info_file
	# Histogram is a space-separated list indexed by coordination number.
	coordination_histogram=$(parse_coordinations $final_dump_file)
	for coordination_number in $(seq 0 9); do
		num_with_coordination=$(echo $coordination_histogram|cut -d' ' -f$((coordination_number +1)) )
		# NOTE(review): inside [[ ]], '>' compares lexicographically, not
		# numerically — works for single-token non-negative counts but
		# '-gt' would state the intent; confirm histogram field format.
		if [[ $num_with_coordination > 0 ]]; then
			echo -e "\t $coordination_number: $num_with_coordination" >> $info_file
		fi
	done

	#echo Hey!

	# Remove temporaries (from utility.src).
	clean $proc_dir >> /dev/null

	(( count++ ))
	# Progress meter: bc gives count/num to 2 decimals; stripping the
	# dot yields a whole-number percentage.
	ratio=$(echo "
	scale=2
	count = $count
	num = $num_info_files
	count / num
	" | bc)
	percent=$(echo $ratio|sed 's/\.//')
	echo -ne "\r${percent}% complete."
done
echo -en "\n"


# Compile info files to report.
echo Compiling info files to report.
report_file=$root_dir/report # $(date +%Y-%m-%d-%H:%M:%S)
date > $report_file
echo "" >> $report_file
# NOTE(review): $info_files expands to one line, so this sort is a
# no-op on word order — presumably 'tr " " "\n" | sort' was intended.
sorted_info_files=$(echo $info_files|sort)
count=0
echo -n "0% complete."
for info_file in $sorted_info_files; do
	cat $info_file >> $report_file
	echo -en "\n\n" >> $report_file

	(( count++ ))
	ratio=$(echo "
	scale=2
	count = $count
	num = $num_info_files
	count / num
	" | bc)
	percent=$(echo $ratio|sed 's/\.//')
	echo -ne "\r${percent}% complete."
done
echo -en "\n"
|
||||
|
||||
|
92
htdocs/examples/subtract_fortfiles.cpp
Normal file
92
htdocs/examples/subtract_fortfiles.cpp
Normal file
@ -0,0 +1,92 @@
|
||||
#include <fstream>
#include <iomanip>
#include <iostream>
#include <string>
#include <vector>
|
||||
// Subtracts one fort-file table (argv[2]) from another (argv[1]) entry by
// entry and prints the result to stdout.  argv[3] supplies up to four
// characters spliced over the start of the first header line.  Rows
// missing from the second table pass through unchanged, and every
// difference is floored at 1e-10 so downstream log plots stay finite.
int main(int argc, char const *argv[])
{
	// Both tables and the replacement label are mandatory.
	if (argc < 4) {
		std::cerr << "Usage: subtract_fortfiles <table1> <table2> <label>\n";
		return 1;
	}
	std::ifstream table1(argv[1]);
	std::ifstream table2(argv[2]);
	if (!table1 || !table2) {
		std::cerr << "Error: could not open input table(s).\n";
		return 1;
	}

	std::string line;

	// Data rows = total lines minus the two header lines.
	int table1_numlines = 0;
	while (std::getline(table1, line))
		++table1_numlines;
	table1.clear();
	table1.seekg(0);

	int table_entries = table1_numlines - 2;
	if (table_entries < 0)
		table_entries = 0; // guard: file shorter than its headers

	// std::vector replaces the original variable-length arrays, which
	// are a non-standard C++ extension.
	std::vector<double> hden1(table_entries);
	std::vector<double> phi1(table_entries);
	std::vector<double> magnitude1(table_entries);
	std::vector<double> resultant_magnitude(table_entries);

	// Keep table1's headers; discard table2's.
	std::string first_line, second_line, scratch;
	getline(table1,first_line);
	getline(table1,second_line);
	getline(table2,scratch);
	getline(table2,scratch);

	// Overwrite the leading header characters with the caller's label,
	// without writing past a short header line.
	for (int c = 0; c < 4 && c < static_cast<int>(first_line.size())
	                      && argv[3][c] != '\0'; ++c)
		first_line[c] = argv[3][c];

	for (int i=0; i<table_entries; i++) {
		table1 >> hden1[i] >> phi1[i] >> magnitude1[i];
	}

	// Subtract table2 where rows exist; floor tiny/negative results.
	double hden2, phi2, magnitude2;
	for (int i=0; i<table_entries; i++) {
		if ( table2.eof() ) {
			resultant_magnitude[i]=magnitude1[i];
		}
		else {
			table2 >> hden2 >> phi2 >> magnitude2;
			resultant_magnitude[i] = magnitude1[i] - magnitude2;
		}
		if ( resultant_magnitude[i] < 1e-10 )
			resultant_magnitude[i]=1e-10;
	}

	std::cout
		<< first_line
		<< std::endl
		<< second_line
		<< std::endl;

	for (int i=0; i<table_entries; i++) {
		std::cout
			<< std::fixed
			<< std::setprecision(3)
			<< hden1[i]
			<< " "
			<< phi1[i]
			<< " "
			<< std::scientific
			<< std::setprecision(4)
			<< resultant_magnitude[i]
			<< std::endl;
	}

	table1.close();
	table2.close();
	return 0;
}
|
88
htdocs/examples/table_powerlaw_spline_sed.cpp
Normal file
88
htdocs/examples/table_powerlaw_spline_sed.cpp
Normal file
@ -0,0 +1,88 @@
|
||||
#include "agn.hpp"
|
||||
#include "sed.hpp"
|
||||
|
||||
|
||||
// Syntax: table_powerlaw_spline <samples table> <powerlaw coordinates> <output table>
|
||||
namespace agn {
	// Enables the spline-SED debug dump written to "spline_sed_debug".
	bool sed_debug =true;
}
|
||||
|
||||
int main(int argc, char const *argv[])
|
||||
{
|
||||
|
||||
if (agn::verbose) std::cout
|
||||
<< "Setting up environment.\n";
|
||||
|
||||
// Create 2d table using n bins, linear values of SED. The
|
||||
// agn sed_powerlaw_spline class has a function for this.
|
||||
int n = 1000;
|
||||
agn::sed_table SED;
|
||||
agn::sed_table samples;
|
||||
agn::sed_table powerlaw_coords;
|
||||
|
||||
const char* sample_filename = argv[1];
|
||||
const char* powerlaw_filename = argv[2];
|
||||
const char* output_filename = argv[3];
|
||||
const char* debug_filename = "spline_sed_debug";
|
||||
|
||||
std::ifstream sample_table(
|
||||
sample_filename,
|
||||
std::ofstream::out
|
||||
);
|
||||
std::ifstream powerlaw_table(
|
||||
powerlaw_filename,
|
||||
std::ofstream::out
|
||||
);
|
||||
std::ofstream output_table(
|
||||
output_filename,
|
||||
std::ofstream::out
|
||||
);
|
||||
std::ofstream debug_file(
|
||||
debug_filename,
|
||||
std::ofstream::out
|
||||
);
|
||||
|
||||
if (agn::verbose) std::cout
|
||||
<< "Creating agn sed object.\n";
|
||||
|
||||
// Read in sampling table and construct a spline model.
|
||||
samples = agn::read_sed_table(sample_table);
|
||||
|
||||
if(agn::sed_debug) debug_file
|
||||
<< "Read "
|
||||
<< samples.table.size()
|
||||
<< " samples:\n"
|
||||
<< format_sed_table(samples);
|
||||
powerlaw_coords = agn::read_sed_table(powerlaw_table);
|
||||
|
||||
if(agn::sed_debug) debug_file
|
||||
<< "Read power coords:\n"
|
||||
<< format_sed_table(powerlaw_coords);
|
||||
|
||||
agn::sed_powerlaw_spline agnsource(samples,powerlaw_coords);
|
||||
|
||||
|
||||
|
||||
|
||||
if(agn::verbose) std::cout
|
||||
<< "Evaluating spectral intensity for "
|
||||
<< n
|
||||
<< " photon energy bins.\n";
|
||||
|
||||
SED = agnsource.histogram_table(n);
|
||||
|
||||
if(agn::verbose) std::cout
|
||||
<< "Printing SED table to file "
|
||||
<< output_filename
|
||||
<< "\n";
|
||||
|
||||
output_table << agn::format_sed_table(SED);
|
||||
|
||||
if(agn::verbose) std::cout
|
||||
<< "Closing files. Goodbye.\n";
|
||||
|
||||
debug_file.close();
|
||||
output_table.close();
|
||||
return 0;
|
||||
}
|
||||
|
86
htdocs/examples/tophat_fft.pl
Executable file
86
htdocs/examples/tophat_fft.pl
Executable file
@ -0,0 +1,86 @@
|
||||
#!/usr/bin/env perl
# -*- coding: utf-8 -*-
#
# Build one or more unit-area tophat functions on a regular x grid,
# FFT them, and write x-domain, frequency-domain, phase, and lag
# tables for each.
#
# NOTE: the shebang must be the very first line of the file for direct
# execution to invoke perl; it previously followed the coding comment.

use feature 'say';    # feature name must be a quoted string/word list
use utf8;
use PDL;
use PDL::FFT;
use PDL::IO::Misc;

# 2*pi. The previous literal (3.1415926539) had its last digits wrong;
# pi rounds to 3.14159265359.
my $_2π = 2 * 3.14159265358979;

# space parameters
our $xres = 0.1;                            # grid spacing
our $xmin = 0;                              # grid lower edge
our $xmax = 2500;                           # grid upper edge (inclusive)
our $xbins = ($xmax - $xmin)/$xres + 1;     # number of samples on the grid

say "Using $xbins bins";
|
||||
|
||||
# tophat parameters: each entry is [centre, full width]
my $μ1 = 7;
my $Δ1 = 5;    # full width

my @tophat_list = ([$μ1, $Δ1], [28, 8], [15, 11]);

our $tophat_count = 0;
foreach (@tophat_list) {
    $tophat_count++;

    # Capture tophat into piddle.
    # Complex number z(x) = u(x) + iv(x); the input signal is purely real.
    my $u = tophat($_->[0], $_->[1]);
    my $v = zeroes($u);

    my $x_coords = $u->xlinvals($xmin, $xmax);

    # Output x-domain tophat
    wcols $x_coords, $u, "analyses/tables/tophat${tophat_count}.tab";

    # Transform z(x) to Z(x⁻¹) = U(x⁻¹) + iV(x⁻¹).
    # fft() operates in place, so work on copies to leave $u/$v intact:
    # plain assignment ($U = $u) would alias the same piddle and the
    # transform would clobber the original signal.
    my $U = $u->copy;
    my $V = $v->copy;
    fft($U, $V);

    # Determine frequency coordinates.
    # The spectrum comes back in wrapped order, so build the frequency
    # values with xlinvals and rotate them into the same ordering; the
    # even/odd element-count cases need slightly different endpoints.
    my $num_elements = nelem($U);
    say "Found $num_elements elements.";

    my $f = ($num_elements % 2 == 0) ?
        $U->xlinvals(
            -($num_elements/2 - 1)/$num_elements/$xres,
            1/2/$xres
        )->rotate(-($num_elements/2 - 1))
        :
        $U->xlinvals(
            -($num_elements/2 - 0.5)/$num_elements/$xres,
            ($num_elements/2 - 0.5)/$num_elements/$xres
        )->rotate(-($num_elements - 1)/2);

    # Output frequency-domain tophat
    wcols $f, $U, $V, "fft${tophat_count}.tab";

    # Phase of each frequency component.
    # NOTE(review): the imaginary part is negated before atan2; this is
    # presumably compensating for PDL::FFT's sign convention for the
    # forward transform — confirm against the PDL::FFT documentation.
    # (Original author: "I really need to figure out why this is
    # necessary for proper output.")
    my $φdiff = atan2(-1 * $V, $U);

    wcols $f, $φdiff, "analyses/tables/tophat_φdiff${tophat_count}.tab";

    # Convert phase to an x-axis offset (lag): Δx = φ / (2π f)
    my $offset = $φdiff / ($_2π * $f);

    # Output frequency-domain time delay for given tophat
    wcols $f, $offset, "analyses/tables/tophat_fft${tophat_count}.tab";
}
|
||||
|
||||
# tophat($mean, $width) -> pdl
#
# Unit-area tophat sampled on the global grid [$xmin, $xmax] with
# spacing $xres: value is 1/$width inside [$mean - $width/2,
# $mean + $width/2] (endpoints included), 0 elsewhere.
#
# Grid points are computed as $xmin + $i*$xres rather than by repeatedly
# adding $xres: accumulated floating-point error in the old loop could
# drop the final sample, yielding one fewer element than the advertised
# $xbins (and even flipping the even/odd frequency-grid branch).
sub tophat {
    my ($mean, $width) = @_;
    my $halfwidth = $width / 2;
    my $lo = $mean - $halfwidth;
    my $hi = $mean + $halfwidth;

    # Inclusive number of steps across [$xmin, $xmax].
    my $steps = int(($xmax - $xmin) / $xres + 0.5);
    my @vals;
    for my $i (0 .. $steps) {
        my $x = $xmin + $i * $xres;
        push @vals, ($x >= $lo && $x <= $hi) ? 1 / $width : 0;
    }
    return pdl(@vals);
}
|
Loading…
Reference in New Issue
Block a user