1
2
3 """
4 A simple interface to work with a database saved on the hard disk.
5
6 Author: Robin Lombaert
7
8 """
9
10 import os
11 import cPickle
12 import time
13 import subprocess
14 import portalocker
15 from glob import glob
16
17 import cc.path
18 from cc.tools.io import DataIO
19
20
21
23
24 '''
25 Update all databases located in any of your modeling folders in
26 cc.path.gastronoom.
27
28 Choose the method to run as well as the database to update, and add the args
29 and kwargs for that method.
30
31 This method synchronizes the databases, so run with care.
32
33 @param func: The method to use for updating a database
34 @type func: function
35 @param db_name: Name of the database to be updated
36 @type db_name: str
37
38 '''
39
40
41 gpaths = sorted(glob(os.path.join(cc.path.gastronoom,'*','GASTRoNOoM*.db')))
42 gpaths = list(set([os.path.split(gp)[0] for gp in gpaths]))
43
44
45 for gp in gpaths:
46
47
48 fn = os.path.join(gp,db_name)
49 print "******************************"
50 print "Now converting database at:"
51 print fn
52
53
54 db = func(db_fn=fn,*args,**kwargs)
55 db.sync()
56
57
58
60
61 '''
62 Convert your local databases such that some keywords are shifted around
63 according to their relevance for the different subcodes of GASTRoNOoM.
64
65 Moves USE_MASER_IN_SPHINX from sphinx to mline. Sets it to the default value
66 of 1, since it was never changed for mline.
67
68 Moves USE_NO_MASER_OPTION, N_FREQ, START_APPROX, USE_FRACTION_LEVEL_CORR,
69 FRACTION_LEVEL_CORR, NUMBER_LEVEL_MAX_CORR from cooling to mline.
70
71 Moves WRITE_INTENSITIES, TAU_MAX, TAU_MIN, CHECK_TAU_STEP from cooling to
72 sphinx.
73
74 Adds the new keywords N_IMPACT_EXTRA[_RIN/ROUT] to cooling.
75
76 Adds the new keywords FRACTION_TAU_STEP, MIN_TAU_STEP to sphinx.
77
78 Adds the new keyword FEHLER to mline.
79
80 Converts any 'double' notation in str format to the floats, e.g.
81 TAU_MIN='-6d0' becomes TAU_MIN=-6.
82
83 Converts all cooling, mline, sphinx and pacs databases found in the
84 GASTRoNOoM home folder.
85
86 Can Update ComboCode input files for the maser keywords.
87
88 @keyword path_input: The location of your ComboCode inputfiles. Use empty
89 string or None if you do not want to update your input-
90 files automatically. Is directly inserted into glob so
91 takes wildcards, eg /Users/robinl/ComboCode/input/*.dat
92
93 (default: '')
94 @type path_input: str
95
96 '''
97
98
99 keys_cool_ml = [('USE_NO_MASER_OPTION',0,int),('N_FREQ',30,int),\
100 ('START_APPROX',0,int),('USE_FRACTION_LEVEL_CORR',1,int),\
101 ('FRACTION_LEVEL_CORR',0.8,float),\
102 ('NUMBER_LEVEL_MAX_CORR',1e-12,float)]
103 keys_cool_sph = [('WRITE_INTENSITIES',0,int),('TAU_MAX',12,float),\
104 ('TAU_MIN',-6,float),('CHECK_TAU_STEP',0.01,float)]
105
106
107
108
109
110
111
112 new_keys_ml = [('FEHLER',1e-4,float),('USE_MASER_IN_SPHINX',1,int)]
113 new_keys_sph = [('FRACTION_TAU_STEP',1e-2,float),\
114 ('MIN_TAU_STEP',1e-4,float)]
115 new_keys_cool = [('N_IMPACT_EXTRA',0,int),\
116 ('N_IMPACT_EXTRA_RIN',100.,float),\
117 ('N_IMPACT_EXTRA_ROUT',150.,float)]
118 convert_double = ['TAU_MAX','TAU_MIN','NUMBER_LEVEL_MAX_CORR',\
119 'STEP_RS_RIN','STEP_RIN_ROUT','FRACTION_TAU_STEP',\
120 'MIN_TAU_STEP']
121
122 gpaths = sorted(glob(os.path.join(cc.path.gastronoom,'*','GASTRoNOoM*.db')))
123 gpaths = list(set([os.path.split(gp)[0] for gp in gpaths]))
124 print "New databases will be located at filename_old.db_dbConversion"
125
126 for gp in gpaths:
127 cfn = os.path.join(gp,'GASTRoNOoM_cooling_models.db')
128 mfn = os.path.join(gp,'GASTRoNOoM_mline_models.db')
129 sfn = os.path.join(gp,'GASTRoNOoM_sphinx_models.db')
130 pfns = sorted(glob(os.path.join(gp,'stars','*','GASTRoNOoM*.db')))
131
132 print "******************************"
133 print "Now converting databases from:"
134 print "\n".join([cfn,mfn,sfn]+pfns)
135
136
137
138 for fn in [cfn,mfn,sfn]+pfns:
139 os.system('cp %s %s'%(fn,fn+'_dbConversion'))
140 cdb = Database(cfn+'_dbConversion')
141 mdb = Database(mfn+'_dbConversion')
142 sdb = Database(sfn+'_dbConversion')
143
144
145 for key,defval,valtype in new_keys_cool:
146
147
148 cdb = addKeyCooling(key=key,val=defval,db=cdb)
149
150
151 for cmid in cdb.keys():
152 for k,v in cdb[cmid].items():
153 if k in convert_double and isinstance(v,str):
154 cdb[cmid][k] = v.replace('d','e')
155 cdb.addChangedKey(cmid)
156
157
158
159
160 for key,defval,valtype in keys_cool_ml + new_keys_ml:
161 for cmid in cdb.keys():
162 if not key in cdb[cmid].keys():
163
164
165 mdb = addKeyMline(key=key,val=defval,db=mdb,id=cmid)
166 continue
167 val = cdb[cmid].pop(key)
168 cdb.addChangedKey(cmid)
169 if key in convert_double:
170 val = float(val.replace('d','e'))
171 mdb = addKeyMline(key=key,val=valtype(val),db=mdb,id=cmid)
172
173
174
175
176 for key,defval,valtype in keys_cool_sph + new_keys_sph:
177 for cmid in cdb.keys():
178 if not key in cdb[cmid].keys():
179
180
181 sdb = addKeySphinx(key=key,val=defval,db=sdb,id=cmid)
182
183 for pp in pfns:
184 pdb = addKeyPacs(key=key,val=defval,\
185 db_fn=pp+'_dbConversion',id=cmid)
186 pdb.sync()
187 continue
188 val = cdb[cmid].pop(key)
189 cdb.addChangedKey(cmid)
190 if key in convert_double:
191 val = float(val.replace('d','e'))
192 sdb = addKeySphinx(key=key,val=valtype(val),db=sdb,id=cmid)
193
194 for pp in pfns:
195 pdb = addKeyPacs(key=key,val=valtype(val),\
196 db_fn=pp+'_dbConversion',id=cmid)
197 pdb.sync()
198
199
200 sdb = rmKeySphinx(key='USE_MASER_IN_SPHINX',db=sdb)
201
202 for pp in pfns:
203 pdb = rmKeyPacs(key='USE_MASER_IN_SPHINX',db_fn=pp+'_dbConversion')
204 pdb.sync()
205
206 cdb.sync()
207 mdb.sync()
208 sdb.sync()
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233 if not path_input: return
234 ifiles = glob(path_input)
235 comment = '# set to 1 if one wants to omit masers occuring when solving '+\
236 'the radiative transfer equation\n'
237 for ff in ifiles:
238 lines = DataIO.readFile(ff,None,replace_spaces=0)
239 ldict = DataIO.readDict(ff)
240 for i,l in enumerate(lines):
241 if l.find('USE_MASER_IN_SPHINX') != -1:
242 k = 'USE_MASER_IN_SPHINX=1 '+l[l.find('#'):]
243 lines[i] = k
244 break
245 if not ldict.has_key('USE_NO_MASER_OPTION'):
246 lines[i:i] = ['USE_NO_MASER_OPTION=1 '+comment]
247 DataIO.writeFile(ff,lines,mode='w',delimiter='')
248
249
250
def updateDustMCMaxDatabase(filename):

    '''
    Update dust filenames in MCMax database with the new OPAC_PATH system.

    The old and new filenames are read from Dust_updatefile.dat in the user
    folder. The updated database is written to filename_new.

    @param filename: The file and path to the MCMax database.
    @type filename: str

    '''

    new_filename = '%s_new'%(filename)

    db_old = Database(filename)
    db_new = Database(new_filename)

    #-- Two columns: old particle filenames and their new equivalents.
    path = os.path.join(cc.path.usr,'Dust_updatefile.dat')
    dustfiles = DataIO.readCols(path)
    pfn_old = list(dustfiles[0])
    pfn_new = list(dustfiles[1])

    for k,v in db_old.items():
        dd = v['dust_species']
        dd_new = dict()
        for pfn,cont in dd.items():
            #-- Translate the filename if listed in the update file;
            #   otherwise keep the old filename unchanged.
            try:
                new_key = pfn_new[pfn_old.index(pfn)]
                dd_new[new_key] = cont
            except ValueError:
                dd_new[pfn] = cont
        v['dust_species'] = dd_new
        db_new[k] = v
    db_new.sync()
284
285
286
288
289 '''
290 Convert MCMax database to the dict format.
291
292 This change was made to speed up the use of the database and makes use of
293 the Database() class.
294
295 @param path_mcmax: the name of the MCMac subfolder.
296 @type path_mcmax: string
297
298 '''
299
300 print '** Converting MCMax database to dictionary format...'
301
302 cc.path.mout = os.path.join(cc.path.mcmax,path_mcmax)
303 db_path = os.path.join(cc.path.mout,'MCMax_models.db')
304 i = 0
305 backup_file = '%s_backup%i'%(db_path,i)
306 while os.path.isfile(backup_file):
307 i += 1
308 backup_file = '%s_backup%i'%(db_path,i)
309 subprocess.call(['mv %s %s'%(db_path,backup_file)],\
310 shell=True,stdout=subprocess.PIPE)
311 mcmax_db = open(backup_file,'r')
312 old_db = []
313 try:
314 while True:
315 model = cPickle.load(mcmax_db)
316 old_db.append(model)
317 except EOFError:
318 print '** End of old database reached.'
319 finally:
320 mcmax_db.close()
321 db = Database(db_path)
322 print '** Inserting entries into the new database.'
323 for commands,model_id in old_db:
324 photon_count = DataIO.convertFloat(commands.pop(0),convert_int=1)
325 commands = [c.split('=') for c in commands]
326 commanddict = dict([(k,DataIO.convertFloat(v,convert_int=1))
327 for k,v in commands
328 if k[0:4] not in ['opac','part','abun',\
329 'Tdes','minr','maxr']])
330
331 if commanddict.has_key('densfile'):
332 commanddict['densfile'] = "'%s'"%commanddict['densfile']
333 if not commanddict.has_key('FLD'):
334 commanddict['FLD'] = '.false.'
335 if not commanddict.has_key('randomwalk'):
336 commanddict['randomwalk'] = '.true.'
337
338 commanddict['photon_count'] = photon_count
339 speciespars = [(k,DataIO.convertFloat(v,convert_int=1))
340 for k,v in commands
341 if k[0:4] in ['opac','part','abun',\
342 'Tdes','minr','maxr']]
343 commanddict['dust_species'] = dict()
344 i = 0
345 while speciespars:
346 i += 1
347 this_species = dict()
348 for k,v in speciespars:
349 if int(k[-2:]) == i:
350 if k[:4] == 'part' or k[:4] == 'opac':
351 speciesfile = v.strip("'")
352 else:
353 this_species[k.strip('%.2i'%i)] = v
354 commanddict['dust_species'][os.path.split(speciesfile)[1]] \
355 = this_species
356 speciespars = [(k,v) for k,v in speciespars
357 if int(k[-2:]) != i]
358 db[model_id] = commanddict
359 db.sync()
360 print '** Done!'
361
362
364
365 '''
366 Remove any db entries with a dictionary that includes the IN_PROGRESS key.
367
368 Works for cooling, mline and sphinx databases.
369
370 @param db_path: full path to the database.
371 @type db_path: string
372
373 '''
374
375 code = os.path.split(db_path)[1].split('_')[-2]
376 if code not in ['cooling','mline','sphinx','MCMax','Chemistry']:
377 raise IOError('Database path is not related to a GASTRoNOoM or MCMax '+\
378 'database.')
379 db = Database(db_path)
380
381 dbfile = db._open('r')
382 print '****************************************************************'
383 print '** Checking {} database for in-progress models now...'.format(code)
384 for cool_id,vcool in db.items():
385
386 if code in ['cooling','MCMax','Chemistry']:
387 if vcool.has_key('IN_PROGRESS'):
388 del db[cool_id]
389 print 'Removed in-progress model with id {}.'.format(cool_id)
390 continue
391 for ml_id,vml in vcool.items():
392 for key,val in vml.items():
393
394 if code == 'mline':
395 if val.has_key('IN_PROGRESS'):
396 del db[cool_id][ml_id][key]
397 db.addChangedKey(cool_id)
398 print 'Removed in-progress molecule {} '.format(key)+\
399 'id {}.'.format(ml_id)
400 continue
401
402 for trans,vsph in val.items():
403
404
405 if vsph.has_key('IN_PROGRESS'):
406 del db[cool_id][ml_id][key][trans]
407 db.addChangedKey(cool_id)
408 print 'Removed in-progress transition '+ \
409 '{} with id {}.'.format(trans,key)
410
411
412
413 if code == 'sphinx' and not db[cool_id][ml_id][key].keys():
414 del db[cool_id][ml_id][key]
415 db.addChangedKey(cool_id)
416 print 'Removed empty trans id {}.'.format(key)
417
418
419
420 if code == 'mline' and not db[cool_id][ml_id].keys():
421 del db[cool_id][ml_id]
422 db.addChangedKey(cool_id)
423 print 'Removed empty trans id {}.'.format(ml_id)
424
425 print '** Unlocking and synchronizing the database...'
426 dbfile.close()
427 db.sync()
428 print '** Done!'
429 print '****************************************************************'
430
431
432
def coolingDbRetrieval(path_gastronoom,r_outer=None):

    '''
    Reconstruct a cooling database based on the mline database and the
    GASTRoNOoM inputfiles.

    Only works if the water MOLECULE convenience keywords, the MOLECULE
    R_OUTER and/or the MOLECULE ENHANCE_ABUNDANCE_FACTOR keywords were not
    adapted!

    @param path_gastronoom: The path_gastronoom to the output folder
    @type path_gastronoom: string

    @keyword r_outer: The outer radius used for the cooling model, regardless
                      of the outer_r_mode parameter.

                      (default: None)
    @type r_outer: float

    '''

    #-- Convenience path
    cc.path.gout = os.path.join(cc.path.gastronoom,path_gastronoom)

    #-- Only keywords that belong in a cooling inputfile are retained;
    #   molecule-specific keywords are excluded explicitly.
    coolkeys_path = os.path.join(cc.path.aux,'Input_Keywords_Cooling.dat')
    coolkeys = DataIO.readCols(coolkeys_path,make_float=0,make_array=0)[0]
    extra_keys = ['ENHANCE_ABUNDANCE_FACTOR','MOLECULE_TABLE','ISOTOPE_TABLE',\
                  'ABUNDANCE_FILENAME','NUMBER_INPUT_ABUNDANCE_VALUES',\
                  'KEYWORD_TABLE']
    coolkeys = [k for k in coolkeys if k not in extra_keys]

    cool_db_path = os.path.join(cc.path.gout,'GASTRoNOoM_cooling_models.db')
    ml_db_path = os.path.join(cc.path.gout,'GASTRoNOoM_mline_models.db')

    #-- Move the existing cooling db out of the way before rebuilding it.
    subprocess.call(['mv %s %s_backupCoolDbRetrieval'\
                     %(cool_db_path,cool_db_path)],shell=True)
    cool_db = Database(db_path=cool_db_path)
    ml_db = Database(db_path=ml_db_path)
    for ml_id in ml_db.keys():
        #-- Rebuild each cooling entry from the model's GASTRoNOoM inputfile.
        file_path = os.path.join(cc.path.gout,'models',\
                                 'gastronoom_%s.inp'%ml_id)
        input_dict = DataIO.readDict(file_path)
        input_dict = dict([(k,v) for k,v in input_dict.items()
                           if k in coolkeys])
        cool_db[ml_id] = input_dict
        if r_outer is not None:
            cool_db[ml_id]['R_OUTER'] = r_outer
    cool_db.sync()
478
479
480
def addKeyCooling(key,val,db_fn='',db=None):

    '''
    Add a (key,value) pair to every entry in the cooling database.

    Not added to a particular entry if already present.

    @param key: The name of the keyword to be added.
    @type key: str
    @param val: The default value for the keyword.
    @type val: any

    @keyword db_fn: The filename and path of the database. Only required if db
                    is not given.

                    (default: '')
    @type db_fn: string
    @keyword db: The database. Is updated and returned. If not given, a
                 filename is required.

                 (default: None)
    @type db: Database()

    @return: The new database, not yet synchronized.
    @rtype: Database()

    '''

    #-- Either an existing db instance or a filename must be given.
    if db is None and not db_fn:
        return

    if db is None:
        db = Database(db_fn)
    for k in db.keys():
        if not key in db[k].keys():
            db[k][key] = val
            db.addChangedKey(k)
    return db
519
520
521
def rmKeyCooling(key,db_fn='',db=None):

    '''
    Remove a key from every entry in the cooling database.

    @param key: The name of the keyword to be removed.
    @type key: str

    @keyword db_fn: The filename and path of the database. Only required if db
                    is not given.

                    (default: '')
    @type db_fn: string
    @keyword db: The database. Is updated and returned. If not given, a
                 filename is required.

                 (default: None)
    @type db: Database()

    @return: The new database, not yet synchronized.
    @rtype: Database()

    '''

    #-- Either an existing db instance or a filename must be given.
    if db is None and not db_fn:
        return

    if db is None:
        db = Database(db_fn)
    for k in db.keys():
        if key in db[k].keys():
            del db[k][key]
            db.addChangedKey(k)
    return db
556
557
558
def addKeyMline(key,val,db_fn='',db=None,id=''):

    '''
    Add a (key,value) pair to every entry in the mline database.

    Not added to a particular entry if already present.

    @param key: The keyword to be added
    @type key: str
    @param val: The default value of the keyword
    @type val: any

    @keyword db_fn: The filename and path of the database. Only required if db
                    is not given.

                    (default: '')
    @type db_fn: string
    @keyword db: The database. Is updated and returned. If not given, a
                 filename is required.

                 (default: None)
    @type db: Database()
    @keyword id: If the (key,val) pair is only to be added to one cooling id,
                 give that id here. If not given, the pair is added to all ids.
                 If id not in db, nothing is done.

                 (default: '')
    @type id: str

    @return: The new database, not yet synchronized.
    @rtype: Database()

    '''

    #-- Either an existing db instance or a filename must be given.
    if db is None and not db_fn:
        return

    if db is None:
        db = Database(db_fn)

    if id and not db.has_key(id):
        return db

    #-- Structure: db[cooling_id][mline_id][molecule][keyword].
    cids = db.keys() if not id else [id]
    for k in cids:
        for l in db[k].keys():
            for mol in db[k][l].keys():
                if not key in db[k][l][mol].keys():
                    db[k][l][mol][key] = val
                    db.addChangedKey(k)
    return db
610
611
612
def rmKeyMline(key,db_fn='',db=None,id=''):

    '''
    Remove a key from every entry in the mline database.

    @param key: The keyword to be removed
    @type key: str

    @keyword db_fn: The filename and path of the database. Only required if db
                    is not given.

                    (default: '')
    @type db_fn: string
    @keyword db: The database. Is updated and returned. If not given, a
                 filename is required.

                 (default: None)
    @type db: Database()
    @keyword id: If the key is only to be removed from one cooling id, give
                 that id here. If not given, the key is removed from all ids.
                 If id not in db, nothing is done.

                 (default: '')
    @type id: str

    @return: The new database, not yet synchronized.
    @rtype: Database()

    '''

    #-- Either an existing db instance or a filename must be given.
    if db is None and not db_fn:
        return

    if db is None:
        db = Database(db_fn)

    if id and not db.has_key(id):
        return db

    #-- Structure: db[cooling_id][mline_id][molecule][keyword].
    cids = db.keys() if not id else [id]
    for k in cids:
        for l in db[k].keys():
            for mol in db[k][l].keys():
                if key in db[k][l][mol].keys():
                    del db[k][l][mol][key]
                    db.addChangedKey(k)
    return db
660
661
662
def rmKeySphinx(key,db_fn='',db=None,id=''):

    '''
    Remove a key from the sphinx database entries.

    @param key: The keyword to be removed
    @type key: str

    @keyword db_fn: The filename and path of the database. Only required if db
                    is not given.

                    (default: '')
    @type db_fn: string
    @keyword db: The database. Is updated and returned. If not given, a
                 filename is required.

                 (default: None)
    @type db: Database()
    @keyword id: If the key is only to be removed from one cooling id, give
                 that id here. If not given, the key is removed from all ids.
                 If id not in db, nothing is done.

                 (default: '')
    @type id: str

    @return: The new database, not yet synchronized.
    @rtype: Database()

    '''

    #-- Either an existing db instance or a filename must be given.
    if db is None and not db_fn:
        return

    if db is None:
        db = Database(db_fn)

    if id and not db.has_key(id):
        return db

    #-- Structure: db[cooling_id][mline_id][sphinx_id][transition][keyword].
    cids = db.keys() if not id else [id]
    for k in cids:
        for l in db[k].keys():
            for o in db[k][l].keys():
                for trans in db[k][l][o].keys():
                    if key in db[k][l][o][trans].keys():
                        del db[k][l][o][trans][key]
                        db.addChangedKey(k)
    return db
711
712
713
def addKeySphinx(key,val,db_fn='',db=None,id=''):

    '''
    Add a (key,value) pair to every entry of the sphinx database entries.

    @param key: The keyword to be added
    @type key: str
    @param val: The default value of the keyword
    @type val: any

    @keyword db_fn: The filename and path of the database. Only required if db
                    is not given.

                    (default: '')
    @type db_fn: string
    @keyword db: The database. Is updated and returned. If not given, a
                 filename is required.

                 (default: None)
    @type db: Database()
    @keyword id: If the (key,val) pair is only to be added to one cooling id,
                 give that id here. If not given, the pair is added to all ids.
                 If id not in db, nothing is done.

                 (default: '')
    @type id: str

    @return: The new database, not yet synchronized.
    @rtype: Database()

    '''

    #-- Either an existing db instance or a filename must be given.
    if db is None and not db_fn:
        return

    if db is None:
        db = Database(db_fn)

    if id and not db.has_key(id):
        return db

    #-- Structure: db[cooling_id][mline_id][sphinx_id][transition][keyword].
    cids = db.keys() if not id else [id]
    for k in cids:
        for l in db[k].keys():
            for o in db[k][l].keys():
                for trans in db[k][l][o].keys():
                    if not key in db[k][l][o][trans].keys():
                        db[k][l][o][trans][key] = val
                        db.addChangedKey(k)
    return db
764
765
766
def rmKeyPacs(key,db_fn='',db=None,id=''):

    '''
    Remove a key from the PACS database entries.

    @param key: The keyword to be removed
    @type key: str

    @keyword db_fn: The filename and path of the database. Only required if db
                    is not given.

                    (default: '')
    @type db_fn: string
    @keyword db: The database. Is updated and returned. If not given, a
                 filename is required.

                 (default: None)
    @type db: Database()
    @keyword id: If the key is only to be removed from one cooling id, give
                 that id here. Note this is the COOLING ID, not the pacs id.
                 Iteration over all pacs ids is always done. However, if id is
                 given it is cross checked with the cooling id in the entry
                 and only then removed. If not given, the key is removed from
                 all pacs ids as well as cooling ids.
                 If cooling id not in db, nothing is done.

                 (default: '')
    @type id: str

    @return: The new database, not yet synchronized.
    @rtype: Database()

    '''

    #-- Either an existing db instance or a filename must be given.
    if db is None and not db_fn:
        return

    if db is None:
        db = Database(db_fn)

    for k in db.keys():
        #-- Only touch entries that match the requested cooling id, if any.
        if id and db[k]['cooling_id'] != id: continue
        for l in db[k]['trans_list']:
            if key in l[2].keys():
                del l[2][key]
                db.addChangedKey(k)
    return db
815
816
817
def addKeyPacs(key,val,db_fn='',db=None,id=''):

    '''
    Add a (key,value) pair to every entry of the PACS database entries.

    @param key: The keyword to be added
    @type key: str
    @param val: The default value of the keyword
    @type val: any

    @keyword db_fn: The filename and path of the database. Only required if db
                    is not given.

                    (default: '')
    @type db_fn: string
    @keyword db: The database. Is updated and returned. If not given, a
                 filename is required.

                 (default: None)
    @type db: Database()
    @keyword id: If the (key,val) pair is only to be added to one cooling id,
                 give that id here. If not given, the pair is added to all
                 ids. If id not in db, nothing is done.

                 (default: '')
    @type id: str

    @return: The new database, not yet synchronized.
    @rtype: Database()

    '''

    #-- Either an existing db instance or a filename must be given.
    if db is None and not db_fn:
        return

    if db is None:
        db = Database(db_fn)

    for k in db.keys():
        #-- Only touch entries that match the requested cooling id, if any.
        if id and db[k]['cooling_id'] != id: continue
        for l in db[k]['trans_list']:
            if not key in l[2].keys():
                l[2][key] = val
                db.addChangedKey(k)
    return db
864
865
866
def replaceSubstring(db,oldss,newss):

    '''
    Replace a substring of values in database entries, such as the home
    directory of filenames. This is applied to all strings in the database
    vals!

    This applies to the value of a dictionary (key,value) pair on the second
    level of the database (the first level being (model_id, dictionary)). Does
    not work for embedded dictionaries in a dictionary, such as the
    dust_species embedded dictionary in an MCMax database!

    The database is not synchronized in this method.

    @param db: The database (for now only MCMax or cooling)
    @type db: Database()
    @param oldss: The old home name (e.g. /home/mariev/ or /home/)
    @type oldss: str
    @param newss: The new home name (e.g. /Users/robinl/ or /Users/)
    @type newss: str

    '''

    for m,dd in db.items():
        for k,v in dd.items():
            #-- Only string values can contain the substring.
            if isinstance(v,str) and oldss in v:
                dd[k] = v.replace(oldss,newss)
                db.addChangedKey(m)
894
895
896
898
899 '''
900 A database class.
901
902 The class creates and manages a dictionary saved to the hard disk.
903
904 It functions as a python dictionary with the extra option of synchronizing
905 the database instance with the dictionary saved on the hard disk.
906
907 No changes will be made to the hard disk copy, unless Database.sync() is
908 called.
909
910 Note that changes made on a deeper level than the (key,value) pairs of the
911 Database (for instance in the case where value is a dict() type itself)
912 will not be automatically taken into account when calling the sync()
913 method. The key for which the value has been changed on a deeper level has
914 to be added to the Database.__changed list by calling addChangedKey(key)
915 manually.
916
917 Running the Database.sync() method will not read the database from the hard
918 disk if no changes were made or if changes were made on a deeper level
919 only. In order to get the most recent version of the Database, without
920 having made any changes, use the .read() method. Note that if changes were
921 made on a deeper level, they will be lost.
922
923 Example:
924
925 >>> import os
926 >>> import Database
927 >>> filename = 'mytest.db'
928 >>> db = Database.Database(filename)
929 No database present at mytest.db. Creating a new one.
930 >>> db['test'] = 1
931 >>> db['test2'] = 'robin'
932 >>> db.sync()
933 >>> db2 = Database.Database(filename)
934 >>> print db2['test']
935 1
936 >>> print db2['test2']
937 robin
938 >>> db2['test'] = 2
939 >>> db2.sync()
940 >>> db.sync()
941 >>> print db['test']
942 1
943 >>> db.read()
944 >>> print db['test']
945 2
946 >>> del db2['test2']
947 >>> db2.sync()
948 >>> print db['test2']
949 robin
950 >>> db.read()
951 >>> print db['test2']
952 Traceback (most recent call last):
953 File "<stdin>", line 1, in <module>
954 KeyError: 'test2'
955 >>> test_dict = dict()
956 >>> db['test'] = test_dict
957 >>> db.sync()
958 >>> db2.read()
959 >>> print db2['test']
960 {}
961 >>> db['test']['test'] = 1
962 >>> db.sync()
963 >>> db2.read()
964 >>> print db2['test']
965 {}
966 >>> db.addChangedKey('test')
967 >>> db.sync()
968 >>> db2.read()
969 >>> print db2['test']
970 {'test': 1}
971 >>> db.setdefault('test','defkey')
972 {'test': 1}
973 >>> db.setdefault('test3','defval')
974 'defval'
975 >>> db.sync()
976 >>> db2.read()
977 >>> print db2['test3']
978 defval
979 >>> os.system('rm %s'%filename)
980 0
981 '''
982
983
def __init__(self,db_path):

    '''
    Initializing a Database class.

    Upon initialization, the class will read the dictionary saved at the
    db_path given as a dictionary.

    Note that cPickle is used to write and read these dictionaries.

    If no database exists at db_path, a new dictionary will be created.

    @param db_path: The path to the database on the hard disk.
    @type db_path: string

    '''

    super(Database, self).__init__()
    self.path = db_path
    self.folder = os.path.split(self.path)[0]

    #-- Load the on-disk copy (creates a new file if none exists yet).
    self.read()

    #-- Track keys changed and deleted in memory, so sync() knows what to
    #   write back to the hard disk copy.
    self.__changed = []
    self.__deleted = []
1007
1008
1009
def __delitem__(self,key):

    '''
    Delete a key from the database.

    This deletion is also done in the hard disk version of the database
    when the sync() method is called.

    This method can be called by using syntax:
    del db[key]

    @param key: a dict key that will be deleted from the Database in memory
    @type key: a type valid for a dict key

    '''

    #-- Remember the deletion so sync() also removes the key on disk.
    self.__deleted.append(key)
    return super(Database,self).__delitem__(key)
1028
1029
1030
1031
def __setitem__(self,key,value):

    '''
    Set a dict key with value.

    This change is only added to the database saved on the hard disk when
    the sync() method is called.

    The key is added to the Database.__changed list.

    This method can be called by using syntax:
    db[key] = value

    @param key: a dict key that will be added to the Database in memory
    @type key: a type valid for a dict key
    @param value: value of the key to be added
    @type value: any

    '''

    #-- Remember the change so sync() writes it to the hard disk copy.
    self.__changed.append(key)
    return super(Database,self).__setitem__(key,value)
1054
1055
1056
def setdefault(self,key,*args):

    '''
    Return key's value, if present. Otherwise add key with value default
    and return.

    Database.__changed is updated with the key if it is not present yet.

    @param key: the key to be returned and/or added.
    @type key: any valid dict() key
    @param args: A default value added to the dict() if the key is not
                 present. If not specified, default defaults to None.
    @type args: any type
    @return: key's value or default

    '''

    #-- Only a missing key results in a change that has to be synced.
    if not self.has_key(key):
        self.__changed.append(key)
    return super(Database,self).setdefault(key,*args)
1077
1078
1079
def pop(self,key,*args):

    '''
    If database has key, remove it from the database and return it, else
    return default.

    If both default is not given and key is not in the database, a KeyError
    is raised.

    If deletion is successful, this change is only added to the database
    saved on the hard disk when the sync() method is called.

    The key is added to the Database.__deleted list, if present originally.

    @param key: a dict key that will be removed from the Database in memory
    @type key: a type valid for a dict key
    @param args: value of the key to be returned if key not in Database
    @type args: any
    @return: value for key, or default

    '''

    #-- Only record the deletion when the key actually exists.
    if self.has_key(key):
        self.__deleted.append(key)
    return super(Database,self).pop(key,*args)
1105
1106
def popitem(self):

    '''
    Remove and return an arbitrary (key, value) pair from the database.

    A KeyError is raised if the database has an empty dictionary.

    If removal is successful, this change is only added to the database
    saved on the hard disk when the sync() method is called.

    The removed key is added to the Database.__deleted list.

    @return: (key, value) pair from Database

    '''

    #-- dict.popitem raises KeyError itself when the database is empty.
    (key,value) = super(Database,self).popitem()
    self.__deleted.append(key)
    return (key,value)
1126
1127
1128
def update(self,*args,**kwargs):

    '''
    Update the database with new entries, as with a dictionary.

    This update is not synched to the hard disk! Instead Database.__changed
    includes the changed keys so that the next sync will save these changes
    to the hard disk.

    @param args: A dictionary type object to update the Database.
    @type args: dict()
    @keyword kwargs: Any extra keywords are added as keys with their values.
    @type kwargs: any type that is allowed as a dict key type.

    '''

    self.__changed.extend(kwargs.keys())

    #-- The positional mapping is optional, as for dict.update(); the
    #   original unconditionally read args[0] and raised IndexError when
    #   update() was called with keywords only.
    for arg in args:
        self.__changed.extend(arg.keys())
    return super(Database,self).update(*args,**kwargs)
1148
1149
1150
1152
1153 '''
1154 Read the database from the hard disk.
1155
1156 Whenever called, the database in memory is updated with the version
1157 saved on the hard disk.
1158
1159 Any changes made outside the session of this Database() instance will
1160 be applied to the database in memory!
1161
1162 Any changes made to existing keys in current memory before calling
1163 read() will be undone! Use sync() instead of read if you want to keep
1164 current changes inside the session.
1165
1166 If no database is present at the path given to Database() upon
1167 initialisation, a new Database is made by saving an empty dict() at the
1168 requested location.
1169
1170 Reading and saving of the database is done by cPickle-ing the dict().
1171
1172 '''
1173
1174 try:
1175 while True:
1176
1177
1178 dbfile = self._open('r')
1179 try:
1180 try:
1181 db = cPickle.load(dbfile)
1182 dbfile.close()
1183 break
1184 except ValueError:
1185 print 'Loading database failed: ValueError ~ ' + \
1186 'insecure string pickle. Waiting 5 seconds ' + \
1187 'and trying again.'
1188 dbfile.close()
1189 time.sleep(5)
1190 except EOFError:
1191 print 'Loading database failed: EOFError. Waiting 5 ' + \
1192 'seconds and trying again.'
1193 dbfile.close()
1194 time.sleep(5)
1195 self.clear()
1196 super(Database,self).update(db)
1197 except IOError:
1198 print 'No database present at %s. Creating a new one.'%self.path
1199 self.__save()
1200
1201
1202
def sync(self):

    '''
    Update the database on the harddisk and in the memory.

    The database is read anew, ie updated with the hard disk version to
    account for any changes made by a different program. Next, the changes
    made to the database in memory are applied, before saving the database
    to the hard disk again.

    Any items deleted from the database in memory will also be deleted from
    the version saved on the hard disk!

    The keys that are changed explicitly are all listed in self.__changed,
    to which entries can be added manually using the addChangedKey method,
    or automatically by calling .update(), .__setitem__() or .setdefault().

    '''

    if self.__changed or self.__deleted:

        #-- Snapshot the explicitly changed entries before read() replaces
        #   the in-memory contents with the on-disk version.
        current_db = dict([(k,v)
                           for k,v in self.items()
                           if k in set(self.__changed)])
        while True:
            self.read()

            #-- Apply the recorded deletions to the freshly read copy.
            self.__deleted = list(set(self.__deleted))
            for key in self.__deleted:
                try:
                    super(Database,self).__delitem__(key)
                except KeyError:
                    pass

            #-- Re-apply the in-memory changes and write to disk.
            super(Database,self).update(current_db)
            backup_file = self.__save()
            try:
                #-- Verify the write by reading the file back and comparing
                #   with the in-memory contents.
                testread = Database(self.path)
                if testread != self:
                    raise TypeError

                #-- Write confirmed: the backup file is no longer needed.
                if backup_file and os.path.isfile(backup_file):
                    subprocess.call(['rm %s'%(backup_file)],shell=True)
                break
            except TypeError:
                #-- Write failed verification; wait and try again.
                time.sleep(2)
        self.__deleted = []
        self.__changed = []
    else:
        #-- Nothing to write: just refresh from the hard disk.
        self.read()
1261
1262
1264
1265 '''
1266 Save a database.
1267
1268 Only called by Database() internally. Use sync() to save the Database
1269 to the hard disk.
1270
1271 Reading and saving of the database is done by cPickle-ing the dict().
1272
1273 @return: the filename of the backup database is returned
1274 @rtype: string
1275
1276 '''
1277
1278 backup_file = ''
1279 if os.path.isfile(self.path):
1280 i = 0
1281 backup_file = '%s_backup%i'%(self.path,i)
1282 while os.path.isfile(backup_file):
1283 i += 1
1284 backup_file = '%s_backup%i'%(self.path,i)
1285 subprocess.call(['mv %s %s'%(self.path,backup_file)],\
1286 shell=True)
1287
1288
1289
1290 dbfile = self._open('w')
1291 cPickle.dump(self,dbfile)
1292 dbfile.close()
1293 return backup_file
1294
1295
1296
1298
1299 '''
1300 Open the database on the disk for writing, reading or appending access.
1301
1302 A lock is added to the database, which remains in place until the file
1303 object is closed again.
1304
1305 @return: The opened file
1306 @rtype: file()
1307
1308 '''
1309
1310 dbfile = open(self.path,mode)
1311 portalocker.lock(dbfile, portalocker.LOCK_EX)
1312 return dbfile
1313
1314
1315
1317
1318 '''
1319 Add a key to the list of changed keys in the database.
1320
1321 This is useful if a change was made to an entry on a deeper level,
1322 meaning that the __set__() method of Database() is not called directly.
1323
1324 If the key is not added to this list manually, it will not make it into
1325 the database on the hard disk when calling the sync() method.
1326
1327 @param key: the key you want to include in the next sync() call.
1328 @type key: string
1329
1330 '''
1331
1332 if key not in self.__changed: self.__changed.append(key)
1333
1334
1335
1337
1338 '''
1339 Return a list of all keys that have been deleted from the database in
1340 memory.
1341
1342 @return: list of keys
1343 @rtype: list
1344 '''
1345
1346 return self.__deleted
1347
1348
1349
1351
1352 '''
1353 Return a list of all keys that have been changed in the database in
1354 memory.
1355
1356 @return: list of keys
1357 @rtype: list
1358 '''
1359
1360 return self.__changed
1361
1362
1363
#-- Run the module's doctests when this file is executed as a script.
if __name__ == "__main__":
    import doctest
    doctest.testmod()
1367
1368
1369 """
1370 def getDbStructure(db_path,id_index=None,code=None):
1371
1372 '''
1373 Return id_index and code based on db_path, or id_index, or code.
1374
1375 @param db_path: the path + filename of the database
1376 @type db_path: string
1377
1378 @keyword id_index: index of model id in the database entries, specify only
1379 if you know what you're doing! default None is used if
1380 the code keyword is used or the code is taken from the
1381 database filename.
1382
1383 (default: None)
1384 @type id_index: int
1385 @keyword code: name of the (sub-) code for which the deletion is done
1386 (pacs, mcmax, cooling, mline, sphinx), default None can be
1387 used when id_index is given, or if the database filename
1388 includes the codename you want to use deletion for
1389
1390 (default: None)
1391 @type code: string
1392
1393 @return: The id_index and code are returned. The code can be None if
1394 id_index was already defined beforehand, in which case the precise
1395 code doesn't matter, however the method will try to determine the
1396 code based on the filename
1397 @rtype: (int,string)
1398
1399 '''
1400
1401 if id_index <> None and code <> None:
1402 raise IOError('Either specify the code, or the id_index or none of ' +\
1403 'both.')
1404 code_indices = dict([('cooling',1),('mcmax',1),('sphinx',2),('mline',2),\
1405 ('pacs',2)])
1406 if id_index is None:
1407 if code <> None:
1408 id_index = code_indices[code.lower()]
1409 else:
1410 for k,v in code_indices.items():
1411 if k.lower() in os.path.split(db_path)[1].lower():
1412 if id_index <> None:
1413 this_str = 'There is an ambiguity in the filename of'+\
1414 ' the database. At least two of the codes'+\
1415 ' are in the name.'
1416 raise ValueError(this_str)
1417 id_index = v
1418 code = k
1419 if id_index is None:
1420 this_str2 = 'Cannot figure out which code the database is used for. '+\
1421 'Please specify the "code" keyword.'
1422 raise IOError(this_str2)
1423 if code is None:
1424 for k in code_indices.keys():
1425 if k.lower() in os.path.split(db_path)[1].lower():
1426 if code <> None:
1427 print 'WARNING! There is an ambiguity in the ' + \
1428 'filename of the database. At least two of the'+\
1429 ' codes are in the name.'
1430 else:
1431 code = k
1432 return id_index,code
1433
1434
1435
1436 def deleteModel(model_id,db_path,id_index=None,code=None):
1437
1438 '''
1439 Delete a model_id from a database. A back-up is created!
1440
1441 This method is only used by MCMax.py, which uses a database in the old
1442 format.
1443
1444 @param model_id: the model_id
1445 @type model_id: string
1446 @param db_path: the path + filename of the database
1447 @type db_path: string
1448
1449 @keyword id_index: index of model id in the database entries, specify only
1450 if you know what you're doing! default None is used if
1451 the code keyword is used or if the code is taken from
1452 the database filename.
1453
1454 (default: None)
1455 @type id_index: int
1456 @keyword code: name of the (sub-) code for which the deletion is done
1457 (pacs, mcmax, cooling, mline, sphinx), default None can be
1458 used when id_index is given, or if the database filename
1459 includes the codename you want to use deletion for
1460
1461 (default: None)
1462 @type code: string
1463
1464 '''
1465
1466 id_index,code = getDbStructure(db_path=db_path,id_index=id_index,code=code)
1467 subprocess.call(['mv ' + db_path + ' ' + db_path+'old'],\
1468 shell=True,stdout=subprocess.PIPE)
1469 gastronoom_db_old = open(db_path+'old','r')
1470 gastronoom_db = open(db_path,'w')
1471 print "Making the following change(s) to database at %s: \n"%db_path + \
1472 "(Note that if nothing is printed, nothing happened and the model"+\
1473 "id wasn't found.)"
1474 i = 0
1475 while True:
1476 try:
1477 model = cPickle.load(gastronoom_db_old)
1478 if model_id == model[id_index]:
1479 print 'Deleting model id %s from database at %s.'\
1480 %(model_id,db_path)
1481 i += 1
1482 else:
1483 cPickle.dump(model,gastronoom_db)
1484 except EOFError:
1485 print 'Done! %i models were deleted from the database.'%i
1486 break
1487 gastronoom_db_old.close()
1488 gastronoom_db.close()
1489
1490
1491 def convertPacsDbToDictDb(db_path):
1492
1493 '''
1494 ***OBSOLETE***
1495
1496 Convert an old PACS database to the new dictionary based format.
1497
1498 Keeps the old db in existence!
1499
1500 This change was made to speed up the use of the database and makes use of
1501 the Database() class.
1502
1503 @param db_path: Path to the PACS database
1504 @type db_path: string
1505
1506 '''
1507 subprocess.call([' '.join(['mv',db_path,db_path+'_old'])],shell=True)
1508 pacs_db_old = open(db_path+'_old','r')
1509 pacs_db_new = open(db_path,'w')
1510 old_db = []
1511 while True:
1512 try:
1513 model = cPickle.load(pacs_db_old)
1514 old_db.append(model)
1515 except EOFError:
1516 break
1517 new_db = dict()
1518 for entry in old_db:
1519 trans_list = entry[0]
1520 filename = entry[1]
1521 pacs_id = entry[2]
1522 model_id = entry[3]
1523 if not new_db.has_key(pacs_id):
1524 new_db[pacs_id] = dict([('filenames',[])])
1525 new_db[pacs_id]['trans_list'] = trans_list
1526 new_db[pacs_id]['filenames'].append(filename)
1527 new_db[pacs_id]['cooling_id'] = model_id
1528 cPickle.dump(new_db,pacs_db_new)
1529 pacs_db_old.close()
1530 pacs_db_new.close()
1531
1532
1533
1534 def convertGastronoomDatabases(path_gastronoom):
1535
1536 '''
1537 ***OBSOLETE***
1538
1539 Convert all GASTRoNOoM databases (cooling, mline, sphinx) to the dict
1540 format.
1541
1542 This change was made to speed up the use of the database and makes use of
1543 the Database() class.
1544
1545 @param path_gastronoom: the name of the GASTRoNOoM subfolder.
1546 @type path_gastronoom: string
1547
1548 '''
1549
1550 cooling_path = os.path.join(os.path.expanduser('~'),'GASTRoNOoM',\
1551 path_gastronoom,'GASTRoNOoM_cooling_models.db')
1552 convertCoolingDb(path=cooling_path)
1553 mline_path = os.path.join(os.path.expanduser('~'),'GASTRoNOoM',\
1554 path_gastronoom,'GASTRoNOoM_mline_models.db')
1555 convertMlineDb(path=mline_path)
1556 sphinx_paths = glob(os.path.join(os.path.expanduser('~'),'GASTRoNOoM',\
1557 path_gastronoom,'models','GASTRoNOoM*.db'))
1558 convertSphinxDb(paths=sphinx_paths,path_gastronoom=path_gastronoom)
1559 print '** Done!'
1560 print '*******************************'
1561
1562
1563
1564 def convertCoolingDb(path):
1565
1566 '''
1567 ***OBSOLETE***
1568
1569 Convert the cooling db from list to dictionary format.
1570
1571 This change was made to speed up the use of the database and makes use of
1572 the Database() class.
1573
1574 @param path: the full path to the db
1575 @type path: string
1576
1577 '''
1578
1579 print '** Converting Cooling database to dictionary format...'
1580 subprocess.call([' '.join(['mv',path,path+'_old'])],shell=True)
1581 cool_db_old = open(path+'_old','r')
1582 old_db = []
1583 while True:
1584 try:
1585 model = cPickle.load(cool_db_old)
1586 old_db.append(model)
1587 except EOFError:
1588 break
1589 cool_db_old.close()
1590 new_db = dict()
1591 for entry in old_db:
1592 command_list = entry[0]
1593 model_id = entry[1]
1594 new_db[model_id] = command_list
1595 saveDatabase(db_path=path,db=new_db)
1596
1597
1598
1599 def convertMlineDb(path):
1600
1601 '''
1602 ***OBSOLETE***
1603
1604 Convert the mline db from list to dictionary format.
1605
1606 This change was made to speed up the use of the database and makes use of
1607 the Database() class.
1608
1609 @param path: the full path to the db
1610 @type path: string
1611
1612 '''
1613
1614 print '** Converting Mline database to dictionary format...'
1615 subprocess.call([' '.join(['mv',path,path+'_old'])],shell=True)
1616 ml_db_old = open(path+'_old','r')
1617 old_db = []
1618 while True:
1619 try:
1620 model = cPickle.load(ml_db_old)
1621 old_db.append(model)
1622 except EOFError:
1623 break
1624 ml_db_old.close()
1625 new_db = dict()
1626 for entry in old_db:
1627 molecule = entry[0]
1628 molec_dict = entry[1]
1629 molec_id = entry[2]
1630 model_id = entry[3]
1631 if not new_db.has_key(model_id):
1632 new_db[model_id] = dict()
1633 if not new_db[model_id].has_key(molec_id):
1634 new_db[model_id][molec_id] = dict()
1635 new_db[model_id][molec_id][molecule] = molec_dict
1636 saveDatabase(db_path=path,db=new_db)
1637
1638
1639
1640 def convertSphinxDb(paths,path_gastronoom):
1641
1642 '''
1643 ***OBSOLETE***
1644
1645 Convert the sphinx db from list to dictionary format.
1646
1647 This change was made to speed up the use of the database and makes use of
1648 the Database() class.
1649
1650 @param paths: list of all full paths to the dbs
1651 @type paths: list[string]
1652 @param path_gastronoom: the name of the GASTRoNOoM subfolder.
1653 @type path_gastronoom: string
1654
1655 '''
1656
1657 #[subprocess.call([' '.join(['mv',path,path+'_old'])],shell=True)
1658 # for path in paths]if model[0] == self.command_list:
1659 if self.replace_db_entry and 0:
1660 mcmax_db.close()
1661 Database.deleteModel(model_id=str(model[1]),db_path=os.path.join(os.path.expanduser('~'),'MCMax',self.path,'MCMax_models.db'))
1662 raise EOFError
1663 else:
1664 print '** MCMax model has been calculated before with ID ' + str(model[1]) + '.'
1665 self.model_id = model[1]
1666 break
1667 print '** Converting Sphinx database to dictionary format...'
1668 old_db = []
1669 for path in paths:
1670 sph_db_old = open(path,'r')
1671 while True:
1672 try:
1673 model = cPickle.load(sph_db_old)
1674 old_db.append(model)
1675 except EOFError:
1676 break
1677 sph_db_old.close()
1678 new_db = dict()
1679 for entry in old_db:
1680 transition = entry[0]
1681 trans_dict = entry[1]
1682 trans_id = entry[2]
1683 molec_id = entry[3]
1684 model_id = entry[4]
1685 if not new_db.has_key(model_id):
1686 new_db[model_id] = dict()
1687 if not new_db[model_id].has_key(molec_id):
1688 new_db[model_id][molec_id] = dict()
1689 if not new_db[model_id][molec_id].has_key(trans_id):
1690 new_db[model_id][molec_id][trans_id] = dict()
1691 new_db[model_id][molec_id][trans_id][transition] = trans_dict
1692 new_path = os.path.join(os.path.expanduser('~'),'GASTRoNOoM',\
1693 path_gastronoom,'GASTRoNOoM_sphinx_models.db')
1694 saveDatabase(db_path=new_path,db=new_db)
1695
1696
1697
1698 def updateCoolingOpas(path_gastronoom):
1699
1700 '''
1701 ***OBSOLETE***
1702
1703 Update cooling database at path_gastronoom with the new opacity keys.
1704
1705 This method was used when the conversion from a single input opacity file in
1706 GASTRoNOoM to an inputparameter filename for these opacities was made.
1707
1708 @param path_gastronoom: the name of the GASTRoNOoM subfolder.
1709 @type path_gastronoom: string
1710
1711 '''
1712
1713 print '** Converting cooling database opacity entries.'
1714 cooling_path = os.path.join(os.path.expanduser('~'),'GASTRoNOoM',\
1715 path_gastronoom,'GASTRoNOoM_cooling_models.db')
1716 subprocess.call([' '.join(['mv',cooling_path,cooling_path+\
1717 '_old_opakeys'])],shell=True)
1718 old_db = getDatabase(db_path=cooling_path+'_old_opakeys')
1719 new_db = dict()
1720 new_db.update(old_db)
1721 for model_id in new_db.keys():
1722 if not old_db[model_id]['#OPA_FILE'] \
1723 and not old_db[model_id]['#TEMDUST_FILE']:
1724 new_db[model_id]['TEMDUST_FILENAME'] = '"temdust.kappa"'
1725 new_db[model_id]['USE_NEW_DUST_KAPPA_FILES'] = 0
1726 elif old_db[model_id]['#OPA_FILE'] \
1727 and old_db[model_id]['#TEMDUST_FILE']:
1728 new_db[model_id]['TEMDUST_FILENAME'] = '"%s"'\
1729 %os.path.split(old_db[model_id]['#TEMDUST_FILE'])[1]
1730 new_db[model_id]['USE_NEW_DUST_KAPPA_FILES'] = 1
1731 elif not old_db[model_id]['#OPA_FILE'] \
1732 and old_db[model_id]['#TEMDUST_FILE'] \
1733 == '/home/robinl/GASTRoNOoM/src/data/qpr_files/'+\
1734 'temdust_silicates.kappa':
1735 new_db[model_id]['TEMDUST_FILENAME'] = '"temdust.kappa"'
1736 new_db[model_id]['USE_NEW_DUST_KAPPA_FILES'] = 0
1737 else:
1738 subprocess.call([' '.join(['mv',cooling_path+'_old_opakeys',\
1739 cooling_path])],shell=True)
1740 raise ValueError('Something fishy is going on... Discrepant OPA' +\
1741 ' and TEMDUST files for %s!'%model_id)
1742 del new_db[model_id]['#OPA_FILE']
1743 del new_db[model_id]['#TEMDUST_FILE']
1744 saveDatabase(db_path=cooling_path,db=new_db)
1745 gast_data = os.path.join(os.path.expanduser('~'),'GASTRoNOoM','src','data')
1746 qpr_ori = os.path.join(gast_data,'qpr_files','qpr_silicates_jus1992.dat')
1747 if os.path.isfile(qpr_ori):
1748 print '** Copying original opacity files... Remember to copy qpr.dat'+\
1749 ' and temdust.kappa to your VIC account as well!'
1750 qpr = os.path.join(gast_data,'qpr.dat')
1751 temdust = os.path.join(gast_data,'temdust.kappa')
1752 temd_ori = os.path.join(gast_data,'qpr_files',\
1753 'temdust_silicates.kappa')
1754 subprocess.call([' '.join(['cp',qpr_ori,qpr])],shell=True)
1755 subprocess.call([' '.join(['cp',temd_ori,temdust])],shell=True)
1756 print '** Done!'
1757 print '*******************************'
1758
1759
1760 ##############################
1761 ## FIND FIRST COOLING MODEL ##
1762 ##############################
1763
1764 def getStartPosition(gastronoom_db,model_id):
1765 '''Get the start position in the db based on the cooling model_id, to speed up the cross-checking procedure.
1766
1767 Input: gastronoom_db = File object, the opened database
1768 model_id=string, the cooling model_id that you are searching for.
1769 Output: start_position, the db will HAVE TO BE reset to this position, the position is -1 if the id hasn't been found
1770 '''
1771 try:
1772 while True:
1773 last_position = gastronoom_db.tell()
1774 model = cPickle.load(gastronoom_db)
1775 if str(model[-1]) == model_id: #Find the first position of the cooling model_id occurrence, and remember it for repositioning the file object once a check has been done.
1776 start_position = last_position
1777 break
1778 except EOFError:
1779 start_position = -1
1780 return start_position
1781
1782 ##############################
1783 #CONVERT OLD SPHINX DATABASE##
1784 ##############################
1785
1786 def convertOldSphinxDbToNew(path):
1787 '''Convert an old Sphinx database to the new cooling model_id specific database format. Keeps the old db in existence!'''
1788 gastronoom_db_old = open(os.path.join(os.path.expanduser('~'),'GASTRoNOoM',path,\
1789 'GASTRoNOoM_sphinx_models.db'),'r')
1790 while True:
1791 try:
1792 model = cPickle.load(gastronoom_db_old)
1793 this_cooling_id = model[-1]
1794 gastronoom_db = open(os.path.join(os.path.expanduser('~'),'GASTRoNOoM',path,'models',\
1795 'GASTRoNOoM_sphinx_%s.db'%this_cooling_id),'a')
1796 cPickle.dump(model,gastronoom_db)
1797 gastronoom_db.close()
1798 except EOFError:
1799 print 'Database conversion finished.'
1800 gastronoom_db_old.close()
1801 break
1802
1803 ##############################
1804 #REMOVE OBS INFO FROM MCMAX DB
1805 ##############################
1806
1807 def removeObsInfoMCMaxDb(path):
1808 '''Convert an old MCMax database that includes observation information (ie ray tracing )to a new database without it.
1809 Keeps the old db in existence!
1810
1811 Input: path=string, path where the database is saved (ie path_mcmax). Db is assumed to be called MCMax_models.db!
1812 '''
1813 subprocess.call([' '.join(['mv',os.path.join(os.path.expanduser('~'),'MCMax',path,'MCMax_models.db'),\
1814 os.path.join(os.path.expanduser('~'),'MCMax',path,'MCMax_models_old_obsinfo.db')])],shell=True)
1815 mcmax_db_old = open(os.path.join(os.path.expanduser('~'),'MCMax',path,'MCMax_models_old_obsinfo.db'),'r')
1816 mcmax_db_new = open(os.path.join(os.path.expanduser('~'),'MCMax',path,'MCMax_models.db'),'a')
1817 while True:
1818 try:
1819 model = cPickle.load(mcmax_db_old)
1820 model[0] = [m for m in model[0] if m != os.path.join(os.path.expanduser('~'),'MCMax','src','Spec.out')]
1821 cPickle.dump(model,mcmax_db_new)
1822 except EOFError:
1823 print 'Database conversion finished.'
1824 mcmax_db_old.close()
1825 mcmax_db_new.close()
1826 break
1827
1828 ##############################
1829 #CONVERT OLD MCMAX DATABASE ##
1830 ##############################
1831
1832 def convertOldMCMaxDbToNew(path):
1833 '''Convert an old MCMax database to the new database based on actual inputfiles instead of command line options.
1834 Keeps the old db in existence!
1835
1836 Input: path=string, path where the database is saved. Db is assumed to be called MCMax_models.db!'''
1837 subprocess.call([' '.join(['mv',os.path.join(os.path.expanduser('~'),'MCMax',path,'MCMax_models.db'),\
1838 os.path.join(os.path.expanduser('~'),'MCMax',path,'MCMax_models_old.db')])],shell=True)
1839 mcmax_db_old = open(os.path.join(os.path.expanduser('~'),'MCMax',path,'MCMax_models_old.db'),'r')
1840 mcmax_db_new = open(os.path.join(os.path.expanduser('~'),'MCMax',path,'MCMax_models.db'),'a')
1841 while True:
1842 try:
1843 model = cPickle.load(mcmax_db_old)
1844 model[0] = [m.replace('-s ','') for m in model[0]]
1845 abun_lines = [(i,m.split()) for i,m in enumerate(model[0]) if m[:4] == 'abun']
1846 n = len(abun_lines)
1847 for i,m in abun_lines:
1848 model[0][i] = m[0]
1849 model[0][i+n:i+n] = [m[1]]
1850 cPickle.dump(model,mcmax_db_new)
1851 except EOFError:
1852 print 'Database conversion finished.'
1853 mcmax_db_old.close()
1854 mcmax_db_new.close()
1855 break
1856
1857
1858 def updateDatabase(db_path,db):
1859
1860 '''
1861 Update the database saved on the harddisk. The database is read again,
1862 updated and then saved to ensure no data recently added to the database are
1863 lost.
1864
1865 Input: db_path=string, the path to the database.
1866 db=dict, the new version of the database.
1867 Output: current_db=dict(), the current version of the db is returned.
1868
1869 '''
1870
1871 current_db = getDatabase(db_path=db_path)
1872 current_db.update(db)
1873 saveDatabase(db_path=db_path,db=current_db)
1874 return current_db
1875
1876 ##############################
1877 ## SAVE GASTRONOOM DATABASE ##
1878 ##############################
1879
1880 def saveDatabase(db_path,db):
1881 '''Save a database.
1882
1883 Input: db_path=string, the path to the database.
1884 db=dict, the new version of the database.
1885 '''
1886 dbfile = open(db_path,'w')
1887 cPickle.dump(db,dbfile)
1888 dbfile.close()
1889
1890 ##############################
1891 ### GET GASTRONOOM DATABASE###
1892 ##############################
1893
1894 def getDatabase(db_path):
1895
1896 '''
1897 Return a database.
1898
1899 Input: db_path=string, the path to the database.
1900 '''
1901 try:
1902 dbfile = open(db_path,'r')
1903 while True:
1904 try:
1905 db = cPickle.load(dbfile)
1906 break
1907 except ValueError:
1908 print 'Loading database failed: ValueError ~ insecure string pickle. Waiting 10 seconds and trying again.'
1909 time.sleep(10)
1910 dbfile.close()
1911 return db
1912 except IOError:
1913 return dict()
1914
1915
1916
1917
1918 def browseDatabase(db_path,model_id,code=None,id_index=None,conditional=None,cond_index=None):
1919 '''Browse a database and return the entry from it based on the id.
1920
1921 Input: db_path=str, the filepath to the database to browse
1922 model_id=str, the model_id (molec specific in case of sphinx and mline, general if cooling)
1923 OPTIONAL id_index=int, default=None, index of model id in the database entries, specify only if you know what you're doing!
1924 default None is used if the code keyword is used or the code is taken from the database filename.
1925 OPTIONAL code=string, default=None, name of the (sub-) code for which the db is browsed (pacs, mcmax, cooling, mline, sphinx),
1926 default None can be used when id_index is given, or if the database filename includes the codename
1927 OPTIONAL conditional=anything, default=None, if an extra condition is imposed for one part of the db entry, cond_id has to be
1928 specified.
1929 OPTIONAL cond_index=int, default=None, index of the conditional term in the db entry, must be specified if conditional is not None
1930 Output: dict giving the input parameters
1931 '''
1932 if (conditional is None and cond_index <> None) or (conditional <> None and cond_index is None):
1933 raise IOError('If a conditional term is added, its database index must be given as well; and vice versa. Aborting.')
1934 id_index,code = getDbStructure(db_path=db_path,code=code,id_index=id_index)
1935 star_db = open(db_path,'r')
1936 try:
1937 while True:
1938 entry = cPickle.load(star_db)
1939 if model_id == entry[id_index]:
1940 if conditional <> None:
1941 if entry[cond_index] == conditional:
1942 star_db.close()
1943 break
1944 else:
1945 pass
1946 else:
1947 star_db.close()
1948 break
1949 except EOFError:
1950 star_db.close()
1951 raise EOFError('The %s you requested, is not available in the %s database.'%(model_id,code is None and 'requested' or code))
1952 return entry
1953
1954 ##############################
1955 #ADD PARAMETER TO MODEL IN DB#
1956 ##############################
1957
1958 def addParameterToModel(model_id,database_path,parameter,value):
1959 '''Add a single parameter to a model's parameter dictionary in a database. A backup of the old database is made!
1960
1961 Input: model_id=string, the model_id you're referring to
1962 database_path=string, the database
1963 parameter=string, name of the parameter as it appears in the inputfile for the respective code
1964 value=int/float/..., the value of the parameter
1965 '''
1966 subprocess.call(['mv ' + database_path + ' ' + database_path+'old'],shell=True,stdout=subprocess.PIPE)
1967 gastronoom_db_old = open(database_path+'old','r')
1968 gastronoom_db = open(database_path,'w')
1969 print 'Making a change to database at %s'%database_path
1970 while True:
1971 try:
1972 model = cPickle.load(gastronoom_db_old)
1973 if model_id == model[1]:
1974 print 'Adding the %s parameter with value %s to the dictionary of the model with id %s.'%(parameter,str(value),model_id)
1975 new_dict = model[0]
1976 new_dict[parameter] = value
1977 model = [new_dict,model_id]
1978 cPickle.dump(model,gastronoom_db)
1979 except EOFError:
1980 break
1981 gastronoom_db_old.close()
1982 gastronoom_db.close()
1983
1984
1985
1986 def addParameterToMCMaxDb(database_path,parameter,value,last_par):
1987
1988 '''
1989 Add a single parameter to all models' parameter dictionaries in an MCMax database, if it is not yet present. A backup of the old database is made!
1990
1991 Input: database_path=string, the database
1992 parameter=string, name of the parameter as it appears in the inputfile for the respective code
1993 value=int/float/..., the value of the parameter
1994 last_par=string, the parameter is inserted after this parameter
1995 '''
1996 subprocess.call(['mv ' + database_path + ' ' + database_path+'old'],shell=True,stdout=subprocess.PIPE)
1997 gastronoom_db_old = open(database_path+'old','r')
1998 gastronoom_db = open(database_path,'w')
1999 print 'Making a change to database at %s'%database_path
2000 while True:
2001 try:
2002 model = cPickle.load(gastronoom_db_old)
2003 model_id = model[1]
2004 new_dict = model[0]
2005 keys = [entry.split('=')[0] for entry in new_dict]
2006 index = keys.index(last_par)+1
2007 if keys[index] != parameter:
2008 print 'Adding the %s parameter with value %s to the dictionary of the model with id %s.'%(parameter,str(value),model_id)
2009 new_dict[index:index] = ['%s=%s'%(parameter,str(value))]
2010 model = [new_dict,model_id]
2011 cPickle.dump(model,gastronoom_db)
2012 except EOFError:
2013 break
2014 gastronoom_db_old.close()
2015 gastronoom_db.close()
2016
2017
2018 """
2019