Package ComboCode :: Package cc :: Package data :: Package instruments :: Module Pacs
[hide private]
[frames] | [no frames]

Source Code for Module ComboCode.cc.data.instruments.Pacs

  1  # -*- coding: utf-8 -*- 
  2   
  3  """ 
  4  Producing Pacs-related output. 
  5   
  6  Author: R. Lombaert 
  7   
  8  """ 
  9   
 10  import os 
 11  import subprocess 
 12  import cPickle 
 13  from glob import glob 
 14  from time import gmtime 
 15  from scipy import array,argsort,interpolate 
 16  import numpy as np 
 17   
 18  import cc.path 
 19  from cc.tools.io import DataIO 
 20  from cc.data.instruments.Instrument import Instrument 
 21  from cc.tools.io import Database 
 22  from cc.data import Data 
 23   
 24   
def compareInts(pp1,pp2):
    
    '''
    Compare integrated line fluxes for an identical target with the same 
    input Hipe line list.
    
    Lines are matched between the two objects on (groupid, wave_in). Every
    unique group id is visited exactly once: the previous implementation
    iterated the groupid column row by row, so a group with k fitted lines
    was processed k times and every matched line appeared k times in the
    output arrays.
    
    @param pp1: The first Pacs object with the PACS data and integrated line
                strengths.
    @type pp1: Pacs()
    @param pp2: The second Pacs object with the PACS data and integrated line
                strengths.
    @type pp2: Pacs()
    
    @return: The wavelengths, the two one-to-one comparable arrays of line 
             fluxes and their respective errors, all sorted by wavelength.
    @rtype: (arr,arr,arr,arr,arr)
    
    #pp1 = Pacs.Pacs('waql',6,'codeJun2013',path_linefit='/home/robinl/Data/PACS/waql/lineFit/')
    #pp2 = Pacs.Pacs('waql',6,'codeJun2013',path_linefit='/home/robinl/Data/PACS/waql/lineFit_normalization/')
    #wl, fl1, fl2, fl1err, fl2err = Pacs.compareInts(pp1,pp2)
    #Plotting2.plotCols(x=[wl],y=[fl1/fl2],line_types=['or'],ylogscale=1,ymin=0.75,ymax=1.25,horiz_lines=[0.8,0.9,1.1,1.2])
    '''
    
    lf1 = pp1.linefit
    lf2 = pp2.linefit
    wl = []
    fl1 = []
    fl2 = []
    fl1err = []
    fl2err = []
    #-- Visit every group exactly once (sorted for a deterministic order),
    #   hoisting the boolean-mask selections out of the inner loop.
    for gid in sorted(set(lf1['groupid'])):
        sel1 = lf1[lf1['groupid']==gid]
        sel2 = lf2[lf2['groupid']==gid]
        waves2 = list(sel2['wave_in'])
        for i1,w1 in enumerate(sel1['wave_in']):
            #-- Only compare lines present in both linefit results.
            if w1 not in waves2: continue
            i2 = waves2.index(w1)
            wl.append(w1)
            fl1.append(sel1['line_flux'][i1])
            fl2.append(sel2['line_flux'][i2])
            fl1err.append(sel1['line_flux_err'][i1])
            fl2err.append(sel2['line_flux_err'][i2])
    wl = np.array(wl)
    fl1, fl2 = np.array(fl1), np.array(fl2)
    fl1err, fl2err = np.array(fl1err), np.array(fl2err)
    asor = np.argsort(wl)
    return (wl[asor],fl1[asor],fl2[asor],fl1err[asor],fl2err[asor])
class Pacs(Instrument):
    
    """
    Environment with several tools for Pacs molecular spectroscopy managing.
    
    """
    
    def __init__(self,star_name,oversampling,path=None,redo_convolution=0,\
                 intrinsic=1,path_linefit=''):
        
        '''
        Initializing an instance of Pacs().
        
        @param star_name: Name of the star from Star.dat
        @type star_name: string
        @param oversampling: The PACS instrumental oversampling, for correct
                             convolution of the Sphinx output
        @type oversampling: int
        
        @keyword path: Output folder in the code's home folder. Used to locate 
                       the PACS database. If None, it is not used (eg for line
                       measurement matching/identification)
                       
                       (default: None)
        @type path: string
        @keyword intrinsic: Use the intrinsic Sphinx line profiles for 
                            convolving with the spectral resolution? Otherwise
                            the beam convolved line profiles are used.
                            
                            (default: 1)
        @type intrinsic: bool
        @keyword redo_convolution: if you want to do the convolution of the 
                                   sphinx models regardless of what's already
                                   in the database. The pacs id will not 
                                   change, nor the entry in the db, and the 
                                   old convolved model will be copied to a 
                                   backup
                                   
                                   (default: 0)
        @type redo_convolution: bool
        @keyword path_linefit: The folder name for linefit results from Hipe
                               (created by Pierre, assuming his syntax). The 
                               folder is located in $path_pacs$/$star_name$/.
                               If no folder is given, no linefits are 
                               processed.
                               
                               (default: '')
        @type path_linefit: string
        
        '''
        
        super(Pacs,self).__init__(star_name=star_name,code='GASTRoNOoM',\
                                  path_linefit=path_linefit,path=path,\
                                  oversampling=oversampling,blend_factor=1.2,\
                                  instrument_name='PACS',intrinsic=intrinsic)
        
        #-- Per-band data caches, filled by setData() and setDataResolution().
        self.data_wave_list = []
        self.data_flux_list = []
        self.data_ordernames = []
        self.data_orders = []
        self.data_delta_list = []
        self.redo_convolution = redo_convolution
        
        #-- Always define the database attributes: previously they were never
        #   set when path was None, so any later access (eg prepareSphinx)
        #   raised AttributeError instead of seeing an empty database.
        self.db = None
        self.db_path = None
        
        if not self.path is None:
            #-- Convenience path
            cc.path.gout = os.path.join(cc.path.gastronoom,self.path)
            #-- Check the path for the PACS database if a model folder is known
            db_path = os.path.join(cc.path.gout,'stars',self.star_name)
            if os.path.isdir(db_path):
                self.db_path = os.path.join(db_path,\
                                            'GASTRoNOoM_pacs_models.db')
                self.db = Database.Database(self.db_path)
                DataIO.testFolderExistence(os.path.join(cc.path.gout,'stars',\
                                           self.star_name,'PACS_results'))
        
        #-- Sphinx models have not been prepared yet for this instance.
        self.sphinx_prep_done = 0
        self.readLineFit()
    def readLineFit(self):
        
        '''
        Read the data from the line fit procedure done with Pierres Hipe
        script.
        
        Assumes structure and syntax as given in the example file
        /home/robinl/Data/PACS/v1362aql/lineFit/lineFitResults
        
        The line fit results are saved in self.linefit as a np.recarray.
        
        The columns include (with unit indicated): 
        groupid
        band
        wave_in (micron), 
        wave_fit (micron), 
        line_flux (W/m2), 
        line_flux_err (W/m2), 
        line_flux_rel (%), 
        continuum (W/m2 m), 
        line_peak (W/m2 m), 
        fwhm_fit (micron),
        fwhm_pacs (micron),
        fwhm_rel
        
        '''
        
        #-- The parent class does the actual file parsing. dd is a list of
        #   column lists, or None when no linefit file was found.
        kwargs = dict([('start_from_keyword','GROUPID'),\
                       ('start_row',1)])
        dd = super(Pacs,self).readLineFit(**kwargs)
        if dd is None: return
        
        #-- If no % symbol in 6th last column, new way of giving int ints:
        #   get rid of last 2 columns which do not contain relevant information
        #   as well as the wave ratio column.
        #   NOTE(review): the dels below use negative indices, so their order
        #   matters -- every del shifts what the later indices refer to.
        if not isinstance(dd[-6][0],str):
            del dd[-1]
            del dd[-1]
            del dd[-10]
        #-- Convert relative error in % to ratio 
        #   NOTE(review): assumes dd[-6] holds strings like '12.3%' in both
        #   file formats at this point -- verify against a new-format file.
        dd[-6] = [float(val.strip('%'))/100. for val in dd[-6]]
        del dd[-8]
        #-- Keep groupid (column 0) plus the last 11 columns; drop everything
        #   in between.
        del dd[1:-11]
        names = ('groupid','band','wave_in','wave_fit','line_flux',\
                 'line_flux_err','line_flux_rel','continuum','line_peak',\
                 'fwhm_fit','fwhm_pacs','fwhm_rel')
        #-- groupid is int, band a 3-char string, all other columns floats.
        self.linefit = np.recarray(shape=(len(dd[-1]),),\
                                   dtype=zip(names,[int]+['|S3']+[float]*10))
        for n,d in zip(names,dd):
            self.linefit[n] = d
217 - def addStarPars(self,star_grid):
218 219 ''' 220 Set parameters taken from the PACS database into empty parameter sets. 221 222 Typically used in conjunction with making Star() templates through 223 Star.makeStars(). 224 225 @param star_grid: The parameter sets that will updated in situ 226 @type star_grid: list[Star()] 227 228 ''' 229 230 for star in star_grid: 231 model = star['LAST_PACS_MODEL'] 232 pacs_model = self.db[model].copy() 233 star.update({'LAST_GASTRONOOM_MODEL': pacs_model['cooling_id'],\ 234 'MOLECULE': list(set(\ 235 [(db_entry[2]['TRANSITION'].split()[0],\ 236 db_entry[1]) 237 for db_entry in pacs_model['trans_list']])),\ 238 'TRANSITION': \ 239 [db_entry[2]['TRANSITION'].split() + \ 240 [db_entry[2]['N_QUAD']] 241 for db_entry in pacs_model['trans_list']]}) 242 [trans.setModelId(db_entry[0]) 243 for trans,db_entry in zip(star['GAS_LINES'],\ 244 pacs_model['trans_list'])]
245 246 247
248 - def setData(self,data_filenames=[],searchstring=''):
249 250 ''' 251 Read data from given filenames and remember them. 252 253 If no filenames given, an auto search is performed. 254 255 The data are saved as wavelength and flux lists in the object. 256 257 @keyword data_filenames: The data filenames. If empty, auto-search is 258 done 259 260 (default: []) 261 @type data_filenames: list[string] 262 @keyword searchstring: the searchstring conditional for the auto-search 263 264 (default: '') 265 @type searchstring: string 266 267 ''' 268 269 super(Pacs,self).setData(data_filenames, searchstring) 270 if self.data_filenames: 271 self.data_orders = [int(o[1]) for o in self.data_ordernames]
272 273 274
275 - def prepareSphinx(self,star_grid,redo_sphinx_prep=0):
276 277 ''' 278 Prepare Sphinx PACS models by checking if the Star() instance already 279 has a PACS id associated with it, and if not calling convolveSphinx. 280 281 @param star_grid: The parameter sets 282 @type star_grid: list[Star()] 283 @keyword redo_sphinx_prep: redo the sphinx prep, regardless of whether 284 this Pacs instance already did it once (for 285 instance in case the star_grid changed) 286 287 (default: 0) 288 @type redo_sphinx_prep: bool 289 290 ''' 291 292 if not self.sphinx_prep_done or redo_sphinx_prep: 293 print '** Loading from database, or convolving with ' + \ 294 'Gaussian and rebinning to data wavelength grid.' 295 for i,star in enumerate(star_grid): 296 print '* Sphinx model %i out of %i.' %(i+1,len(star_grid)) 297 if not star['LAST_GASTRONOOM_MODEL']: 298 print '* No cooling model found.' 299 else: 300 self.__convolveSphinx(star=star) 301 if star['LAST_PACS_MODEL']: 302 print '* %s is done!'%star['LAST_PACS_MODEL'] 303 self.sphinx_prep_done = 1 304 self.db.sync()
305 306 307
308 - def getSphinxConvolution(self,star,fn):
309 310 ''' 311 Read the sphinx convolution and return if it has already been done. 312 313 Returns None if the convolution is not available. 314 315 @param star: The Star() object 316 @type star: Star() 317 @param fn: The filename of the dataset (band) for which the convolution 318 is to be returned. 319 @type fn: str 320 321 @return: The sphinx convolution result. (wavelength, flux) 322 @rtype: array 323 324 ''' 325 326 this_id = star['LAST_PACS_MODEL'] 327 if not this_id: 328 return ([],[]) 329 fn = os.path.split(fn)[1] 330 sphinx_file = os.path.join(cc.path.gout,'stars',self.star_name,\ 331 'PACS_results',this_id,'%s_%s'%('sphinx',fn)) 332 return DataIO.readCols(sphinx_file)
333 334 335
    def __convolveSphinx(self,star):
        
        '''
        Check if sphinx output has already been convolved (pacs db) and do so 
        if not.
        
        The result is written to disk per data band and registered in the
        PACS database under star['LAST_PACS_MODEL'] (a timestamped id that is
        created here if the star does not have one yet).
        
        @param star: The parameter set
        @type star: Star()
        
        '''     
        
        #- check for which filenames the convolution has already been done
        finished_conv_filenames = self.checkStarDb(star)
        finished_conv_filenames = [this_f 
                                   for this_f in finished_conv_filenames 
                                   if this_f in [os.path.split(f)[1] 
                                                 for f in self.data_filenames]]
        
        #- if after db check pacs id is undefined, then there is no pacs model,
        #- and the convolution will be done anyway
        if self.redo_convolution and star['LAST_PACS_MODEL']:
            #-- Move the old convolutions to backup files and empty the db
            #   entry's filename list so everything is redone below.
            for filename in finished_conv_filenames:
                ori = os.path.join(cc.path.gout,'stars',self.star_name,\
                                   'PACS_results',star['LAST_PACS_MODEL'],\
                                   '_'.join(['sphinx',filename]))
                backup = os.path.join(cc.path.gout,'stars',self.star_name,\
                                      'PACS_results',star['LAST_PACS_MODEL'],\
                                      '_'.join(['backup','sphinx',filename]))
                #-- NOTE(review): shell=True with paths interpolated into the
                #   command string breaks on whitespace in paths; consider
                #   os.rename or subprocess without shell.
                subprocess.call(['mv %s %s'%(ori,backup)],shell=True)
            self.db[star['LAST_PACS_MODEL']]['filenames'] = []
            finished_conv_filenames = []
        #-- Everything in the data that is not yet convolved for this model.
        filenames_to_do = [this_f 
                           for this_f in [os.path.split(f)[1] 
                                          for f in self.data_filenames]
                           if this_f not in finished_conv_filenames]
        
        #-Get sphinx model output and merge, for all star models in star_grid
        if filenames_to_do:
            #- if list is not empty, some filenames still need a convolution
            print '* Reading Sphinx model and merging.'
            #-- py2 "cond and a or b" idiom: empty wave/flux lists when no
            #   cooling model id is available.
            merged = star['LAST_GASTRONOOM_MODEL'] \
                        and self.mergeSphinx(star) \
                        or [[],[]]
            sphinx_wave = merged[0]
            sphinx_flux = merged[1]
            if not sphinx_wave: 
                print '* No Sphinx data found.'
                return
            
            #- convolve the model fluxes with a gaussian at central wavelength 
            #- from data_wave_list for every star, and appropriate sigma
            print '* Convolving Sphinx model, after correction for v_lsr.'
            #-- The per-band instrumental widths are needed for the Gaussian.
            if not self.data_delta_list:
                self.setDataResolution()
            for filename in filenames_to_do:
                i_file = [os.path.split(f)[1] 
                          for f in self.data_filenames].index(filename)
                if not star['LAST_PACS_MODEL']: 
                    #-- First band for a new model: create a timestamped PACS
                    #   id, a fresh db entry and the output folder.
                    star['LAST_PACS_MODEL'] = \
                        'pacs_%.4i-%.2i-%.2ih%.2i-%.2i-%.2i'\
                        %(gmtime()[0],gmtime()[1],gmtime()[2],\
                          gmtime()[3],gmtime()[4],gmtime()[5])
                    self.db[star['LAST_PACS_MODEL']] = \
                        dict([('filenames',[]),\
                              ('trans_list',star.getTransList(dtype='PACS')),\
                              ('cooling_id',star['LAST_GASTRONOOM_MODEL'])])
                    DataIO.testFolderExistence(\
                        os.path.join(cc.path.gout,'stars',self.star_name,\
                                     'PACS_results',star['LAST_PACS_MODEL']))
                #-- Correct for the v_lsr of the central source
                sphinx_wave_corr = array(sphinx_wave)*(1./(1-self.vlsr/self.c))
                sph_conv = Data.doConvolution(\
                                x_in=sphinx_wave_corr,\
                                y_in=sphinx_flux,\
                                x_out=self.data_wave_list[i_file],\
                                widths=self.data_delta_list[i_file],\
                                oversampling=self.oversampling)
                sph_fn = os.path.join(cc.path.gout,'stars',self.star_name,\
                                      'PACS_results',star['LAST_PACS_MODEL'],\
                                      '_'.join(['sphinx',filename])) 
                DataIO.writeCols(filename=sph_fn,\
                                 cols=[self.data_wave_list[i_file],sph_conv])
                self.db[star['LAST_PACS_MODEL']]['filenames'].append(filename)
                self.db.addChangedKey(star['LAST_PACS_MODEL'])
        
        #- Idea:
        #- 1) grab model flux and wavelength, data flux and wavelengthm, 
        #-    instrument intrinsic resolution and wavelength
        #- 2) scan model fluxes with gaussian window of variable sigma, 
        #-    which is equal to intrinsic resolution at gaussian center0 
        #-    and gaussian center at wavelengths in the data arrays, 
        #-    which may or may not be spaced by the intrinsic resolution;
        #-    take sigma = intrinsic_resolution/delta(lambda) * len(lambda)
        #-    where lambda is [abs(l_model-l_instr)<intrinsic_resolution]
        #- 3) the list found has the same wavelengths as the data input and 
        #-    can be directly compared
435 - def getPacsResolution(self):
436 437 ''' 438 Get the Pacs resolution from cc aux file for all orders. 439 440 @return: The resolution as a function of wavelength for the 3 orders 441 @rtype: (list[list],list[list]) 442 443 ''' 444 445 reso_wave_list = [DataIO.getInputData(path=cc.path.aux,\ 446 filename='Pacs_Resolution.dat',\ 447 keyword='WAVE='+str(order)) 448 for order in range(1,4)] 449 reso_delta_list = [DataIO.getInputData(path=cc.path.aux,\ 450 filename='Pacs_Resolution.dat',\ 451 keyword='ORDER='+str(order)) 452 for order in range(1,4)] 453 return reso_wave_list,reso_delta_list
454 455 456
457 - def setDataResolution(self):
458 459 ''' 460 Read and remember the PACS resolution, based on the wavelength grid of 461 the data. 462 463 Returns a list of deltas for every filename, within a list. 464 465 ''' 466 467 print '** Reading PACS native resolution.' 468 reso_wave_list,reso_delta_list = self.getPacsResolution() 469 470 #- Make interpolators for every order 471 print '** Interpolating PACS native resolution.' 472 interpolator_list = [interpolate.interp1d(x,y) 473 for x,y in zip(reso_wave_list,reso_delta_list)] 474 475 #- Interpolate the PACS resolution for appropriate order to get sigma's 476 #- for data wavelengths 477 self.data_delta_list = [interpolator_list[order-1](x_new) 478 for x_new,order in zip(self.data_wave_list,\ 479 self.data_orders)]
480 481 482
483 - def checkStarDb(self,star):
484 485 ''' 486 Check if for this star the convolution has already been done. 487 488 @param star: The parameter set 489 @type star: Star() 490 491 @return: The filenames of PACS convolutions found for this parameter 492 set. Can be multiple filenames if different bands associated 493 with this set. 494 @rtype: list[string] 495 496 ''' 497 498 if star['LAST_PACS_MODEL']: 499 return self.db[star['LAST_PACS_MODEL']]['filenames'] 500 else: 501 #selection = [(k,v) for k,v in self.db.items() 502 # if v['cooling_id'] == star['LAST_GASTRONOOM_MODEL'] \ 503 # and self.compareTransLists(starlist=star.getTransList(),dblist=v['trans_list'])] 504 selection = [(k,v) 505 for k,v in self.db.items() 506 if v['cooling_id'] == star['LAST_GASTRONOOM_MODEL'] \ 507 and star.getTransList(dtype='PACS') == v['trans_list']] 508 if len(selection) == 1: 509 star['LAST_PACS_MODEL'] = selection[0][0] 510 return selection[0][1]['filenames'] 511 elif len(selection) > 1: 512 raise IOError('Multiple PACS ids found for a single star. ' + \ 513 'Something very fishy is going on... Contact ' +\ 514 'Robin with this error message!') 515 else: 516 return []
517