COMPASS  5.0.0
End-to-end AO simulation tool using GPU acceleration
hdf5_util.py
1 
37 
38 import h5py
39 import pandas
40 import os
41 import numpy as np
42 from subprocess import check_output
43 
44 
def updateParamDict(pdict, pClass, prefix):
    """
    Update the parameter dictionary pdict in place with all the parameters of pClass.

    prefix defines the key prefix for the new dict entries (typically the
    name-mangling prefix of the parameter class, e.g. "_Param_wfs__").

    :param pdict: (dict) : dictionary to update in place
    :param pClass: (object or list) : parameter class instance(s) to read from
    :param prefix: (str) : prefix for the keys of the new entries
    """

    def _public_names(obj):
        # Attribute names that are neither private nor setter/getter helpers.
        return [
            name for name in dir(obj)
            if not (name.startswith('_') or name.startswith('set_') or
                    name.startswith('get_'))
        ]

    def _storable(value):
        # h5py attributes cannot hold Python str: encode them to bytes.
        return value.encode("utf8") if isinstance(value, str) else value

    if isinstance(pClass, list):
        # One entry per attribute, holding the list of values across instances.
        for name in _public_names(pClass[0]):
            key = prefix + name
            pdict.update({key: [_storable(p.__dict__[key]) for p in pClass]})
    else:
        for name in _public_names(pClass):
            key = prefix + name
            pdict.update({key: _storable(pClass.__dict__[key])})
77 
78 
def params_dictionary(config):
    """ Create and return a dictionary of all the config parameters with the
    corresponding keys for further creation of database and save files

    :param config: (module) : simulation parameters

    :return param_dict: (dictionary) : dictionary of parameters
    """
    # Current git commit identifies the code version that produced the data
    commit = check_output(["git", "rev-parse", "--short", "HEAD"]).strip()

    param_dict = {"simul_name": config.simul_name.encode('utf8'), "commit": commit}

    updateParamDict(param_dict, config.p_loop, "_Param_loop__")
    updateParamDict(param_dict, config.p_geom, "_Param_geom__")
    updateParamDict(param_dict, config.p_tel, "_Param_tel__")
    if config.p_atmos is not None:
        updateParamDict(param_dict, config.p_atmos, "_Param_atmos__")
    # BUGFIX: test the same attribute that is used below (p_targets, not p_target)
    if config.p_targets is not None:
        updateParamDict(param_dict, config.p_targets, "_Param_target__")
        param_dict.update({"ntargets": len(config.p_targets)})
    if config.p_wfss is not None:
        updateParamDict(param_dict, config.p_wfss, "_Param_wfs__")
        param_dict.update({"nwfs": len(config.p_wfss)})
    if config.p_dms is not None:
        updateParamDict(param_dict, config.p_dms, "_Param_dm__")
        param_dict.update({"ndms": len(config.p_dms)})
    if config.p_controllers is not None:
        updateParamDict(param_dict, config.p_controllers, "_Param_controller__")
        param_dict.update({"ncontrollers": len(config.p_controllers)})
    if config.p_centroiders is not None:
        updateParamDict(param_dict, config.p_centroiders, "_Param_centroider__")
        param_dict.update({"ncentroiders": len(config.p_centroiders)})

    # h5py attributes cannot store None: replace it with the -10 sentinel
    for k in param_dict.keys():
        if type(param_dict[k]) is list:
            param_dict[k] = [d if d is not None else -10 for d in param_dict[k]]
        elif param_dict[k] is None:
            param_dict[k] = -10
    return param_dict
119 
120 
def create_file_attributes(filename, param_dict):
    """ Create an hdf5 file with attributes corresponding to all simulation parameters

    :param:

        filename : (str) : full path + filename to create

        param_dict : (dict) : parameters dictionary to store as attributes
    """
    hfile = h5py.File(filename, "w")

    for key in list(param_dict.keys()):
        value = param_dict[key]
        # h5py attributes cannot hold Python str: encode scalars and list items.
        if isinstance(value, str):
            attr = value.encode("utf-8")
        elif isinstance(value, list):
            attr = [
                item.encode("utf-8") if isinstance(item, str) else item
                for item in value
            ]
        else:
            attr = value
        hfile.attrs.create(key, attr)
    # Freshly created files are invalid until the matrices are written
    hfile.attrs.create("validity", False)
    print(filename, "initialized")
    hfile.close()
146 
147 
def init_hdf5_files(savepath, param_dict, matricesToLoad):
    """ Initialize the hdf5 files that will store the matrices which are not
    already available in the database.

    For each matrix type ("A", "dm", "imat") absent from matricesToLoad, a new
    attribute-initialized h5 file is created and registered in the database.

    :parameters:

        savepath : (str) : path to the data directory

        param_dict : (dictionary) : parameters dictionary

        matricesToLoad : (dictionary) : matrices that will be loaded and their path
    """
    commit = check_output(["git", "rev-parse", "--short", "HEAD"]).decode('utf8').strip()
    # Matrix type -> subdirectory where its file is stored
    subdirs = {"A": "turbu/", "dm": "mat/", "imat": "mat/"}
    for matrix_type in ("A", "dm", "imat"):
        if matrix_type not in matricesToLoad:
            # The row count of the existing table gives the next file index
            df = pandas.read_hdf(savepath + "matricesDataBase.h5", matrix_type)
            ind = len(df.index)
            filename = (savepath + subdirs[matrix_type] + matrix_type + "_" +
                        commit + "_" + str(ind) + ".h5")
            create_file_attributes(filename, param_dict)
            updateDataBase(filename, savepath, matrix_type)
173 
174 
def initDataBase(savepath, param_dict):
    """ Initialize and create the database for all the saved matrices. This database
    will be placed on the top of the savepath and be named matricesDataBase.h5.

    :parameters:

        savepath : (str) : path to the data repertory

        param_dict : (dictionary) : parameters dictionary
    """
    # One column per parameter, plus bookkeeping columns for each row
    columns = list(param_dict.keys()) + ["path2file", "validity"]
    empty_table = pandas.DataFrame(columns=columns)
    store = pandas.HDFStore(savepath + "matricesDataBase.h5")
    for table_name in ("A", "imat", "dm"):
        store.put(table_name, empty_table)
    store.close()
    print("Matrices database created")
195 
196 
def updateDataBase(h5file, savepath, matrix_type):
    """ Update the database adding a new row to the matrix_type database.

    :parameters:

        h5file : (str) : path to the new h5 file to add

        savepath : (str) : path to the data directory

        matrix_type : (str) : type of matrix to store ("A", "dm" or "imat")

    :raises ValueError: if matrix_type is not one of the handled types
    """
    # Validate before touching any file so nothing is left open on error
    if matrix_type not in ("A", "imat", "dm"):
        raise ValueError("Wrong matrix_type specified. See documentation")

    with h5py.File(h5file, "r") as f:
        store = pandas.HDFStore(savepath + "matricesDataBase.h5")
        try:
            df = store[matrix_type]
            ind = len(df.index)
            # Copy every attribute of the new file into the new database row
            for i in list(f.attrs.keys()):
                df.loc[ind, i] = f.attrs[i]
            df.loc[ind, "path2file"] = h5file
            # New rows are invalid until the matrices are actually written
            df.loc[ind, "validity"] = False
            store.put(matrix_type, df)
        finally:
            store.close()
224 
225 
def save_hdf5(filename, dataname, data):
    """ Create a dataset named dataname in an existing hdf5 file and store data in it.

    :param:

        filename: (str) : full path to the file

        dataname : (str) : name of the data (imat, cmat...)

        data : np.array : data to save
    """
    h5f = h5py.File(filename, "r+")
    h5f.create_dataset(dataname, data=data)
    h5f.close()
241 
242 
def save_h5(filename, dataname, config, data):
    """ save_h5(filename, dataname, config, data)
    Create a hdf5 file and store data in it with full header from config parameters
    Useful to backtrace data origins

    :param:

        filename: (str) : full path to the file

        dataname : (str) : name of the data (imat, cmat...)

        config : (module) : config parameters

        data : np.array : data to save
    """
    # Write the full parameter set as file attributes, then append the data
    p_dict = params_dictionary(config)
    create_file_attributes(filename, p_dict)
    save_hdf5(filename, dataname, data)
    print(filename, "has been written")
262 
263 
def checkMatricesDataBase(savepath, config, param_dict):
    """ Check in the database if the current config have been already run. If so,
    return a dictionary containing the matrices to load and their path. Matrices
    which don't appear in the dictionary will be computed, stored and added
    to the database during the simulation.
    If the database doesn't exist, this function creates it.

    :parameters:

        savepath : (str) : path to the data repertory

        config : (module) : simulation parameters

        param_dict : (dictionary) : parameters dictionary

    :return:

        matricesToLoad : (dictionary) : matrices that will be load and their path
    """
    matricesToLoad = {}
    database_path = savepath + "matricesDataBase.h5"
    if os.path.exists(database_path):
        checkTurbuParams(savepath, config, param_dict, matricesToLoad)
        checkDmsParams(savepath, config, param_dict, matricesToLoad)
        # Controller matrices depend on the DM geometry: only reuse them
        # when matching DM matrices were found in the database
        if "dm" in matricesToLoad:
            checkControlParams(savepath, config, param_dict, matricesToLoad)
    else:
        initDataBase(savepath, param_dict)
    # Create fresh files for every matrix type that will not be loaded
    init_hdf5_files(savepath, param_dict, matricesToLoad)
    return matricesToLoad
296 
297 
def checkTurbuParams(savepath, config, pdict, matricesToLoad):
    """ Compare the current turbulence parameters to the database. If similar parameters
    are found, the matricesToLoad dictionary is completed.
    Since all the turbulence matrices are computed together, we only check the parameters
    for the A matrix : if we load A, we load B, istx and isty too.

    :parameters:

        savepath : (str) : path to the data directory

        config : (module) : simulation parameters

        pdict : (dictionary) : parameters dictionary

        matricesToLoad : (dictionary) : matrices that will be load and their path
    """
    dataBase = pandas.read_hdf(savepath + "matricesDataBase.h5", "A")
    param2test = [
            "_Param_atmos__r0", "_Param_atmos__seeds", "_Param_atmos__L0",
            "_Param_atmos__alt", "_Param_tel__diam", "_Param_tel__cobs",
            "_Param_geom__pupdiam", "_Param_geom__zenithangle", "_Param_target__xpos",
            "_Param_target__ypos", "_Param_wfs__xpos", "_Param_wfs__ypos"
    ]

    # PERF: the commit hash is loop-invariant — run git once, not once per row
    commit = check_output(["git", "rev-parse", "--short", "HEAD"]).strip()

    for i in dataBase.index:
        cc = 0
        # Only rows produced by the same code version and flagged valid can match
        if (dataBase.loc[i, "validity"] and (dataBase.loc[i, "commit"] == commit)):
            cond = True
            while cond and cc < len(param2test):
                cond = dataBase.loc[i, param2test[cc]] == pdict[param2test[cc]]
                if type(cond) is np.ndarray:
                    # Vector-valued parameters: every element must match
                    cond = cond.all()
                cc += 1

            if not cond:
                # For debug: report the first parameter that differs
                cc -= 1
                print(param2test[cc] + " has changed from ",
                      dataBase.loc[i, param2test[cc]], " to ", pdict[param2test[cc]])
        else:
            cond = False

        if cond:
            matricesToLoad["index_turbu"] = i
            matricesToLoad["A"] = dataBase.loc[i, "path2file"]
    return
353 
354 
def checkControlParams(savepath, config, pdict, matricesToLoad):
    """ Compare the current controller parameters to the database. If similar parameters
    are found, matricesToLoad dictionary is completed.
    Since all the controller matrices are computed together, we only check the parameters
    for the imat matrix : if we load imat, we load eigenv and U too.

    :parameters:

        savepath : (str) : path to the data directory

        config : (module) : simulation parameters

        pdict : (dictionary) : parameters dictionary

        matricesToLoad : (dictionary) : matrices that will be load and their path
    """
    dataBase = pandas.read_hdf(savepath + "matricesDataBase.h5", "imat")

    param2test = [
            "_Param_tel__diam", "_Param_tel__t_spiders", "_Param_tel__spiders_type",
            "_Param_tel__pupangle", "_Param_tel__referr", "_Param_tel__std_piston",
            "_Param_tel__std_tt", "_Param_tel__type_ap", "_Param_tel__nbrmissing",
            "_Param_tel__cobs", "_Param_geom__pupdiam", "nwfs", "_Param_wfs__type",
            "_Param_wfs__nxsub", "_Param_wfs__npix", "_Param_wfs__pixsize",
            "_Param_wfs__fracsub", "_Param_wfs__xpos", "_Param_wfs__ypos",
            "_Param_wfs__Lambda", "_Param_wfs__dms_seen", "_Param_wfs__fssize",
            "_Param_wfs__fstop", "_Param_wfs__pyr_ampl", "_Param_wfs__pyr_loc",
            "_Param_wfs__pyr_npts", "_Param_wfs__pyr_pup_sep", "_Param_wfs__pyrtype",
            "ndms", "_Param_dm__type", "_Param_dm__alt", "_Param_dm__coupling",
            "_Param_dm__margin_in", "_Param_dm__margin_out", "_Param_dm__nact",
            "_Param_dm__nkl", "_Param_dm__type_kl", "_Param_dm__push4imat",
            "_Param_dm__thresh", "_Param_dm__unitpervolt", "ncentroiders",
            "_Param_centroider__type", "_Param_centroider__nmax",
            "_Param_centroider__nwfs", "_Param_centroider__sizex",
            "_Param_centroider__sizey", "_Param_centroider__thresh",
            "_Param_centroider__type_fct", "_Param_centroider__weights",
            "_Param_centroider__width"
    ]

    # PERF: the commit hash is loop-invariant — run git once, not once per row
    commit = check_output(["git", "rev-parse", "--short", "HEAD"]).strip()

    for i in dataBase.index:
        cc = 0
        # Only rows produced by the same code version and flagged valid can match
        if (dataBase.loc[i, "validity"] and (dataBase.loc[i, "commit"] == commit)):
            cond = True
            while cond and cc < len(param2test):
                cond = dataBase.loc[i, param2test[cc]] == pdict[param2test[cc]]
                if type(cond) is np.ndarray:
                    # Vector-valued parameters: every element must match
                    cond = cond.all()
                cc += 1

            if not cond:
                # For debug: report the first parameter that differs
                cc -= 1
                print(param2test[cc] + " has changed from ",
                      dataBase.loc[i, param2test[cc]], " to ", pdict[param2test[cc]])
        else:
            cond = False

        if cond:
            matricesToLoad["index_control"] = i
            matricesToLoad["imat"] = dataBase.loc[i, "path2file"]
    return
417 
418 
def checkDmsParams(savepath, config, pdict, matricesToLoad):
    """ Compare the current DM parameters to the database. If similar parameters
    are found, matricesToLoad dictionary is completed.
    Since all the dms matrices are computed together, we only check the parameters
    for the pztok matrix : if we load pztok, we load pztnok too.

    :parameters:

        savepath : (str) : path to the data directory

        config : (module) : simulation parameters

        pdict : (dictionary) : parameters dictionary

        matricesToLoad : (dictionary) : matrices that will be load and their path
    """
    dataBase = pandas.read_hdf(savepath + "matricesDataBase.h5", "dm")

    param2test = [
            "_Param_tel__diam", "_Param_tel__t_spiders", "_Param_tel__spiders_type",
            "_Param_tel__pupangle", "_Param_tel__referr", "_Param_tel__std_piston",
            "_Param_tel__std_tt", "_Param_tel__type_ap", "_Param_tel__nbrmissing",
            "_Param_tel__cobs", "_Param_geom__pupdiam", "nwfs", "_Param_wfs__type",
            "_Param_wfs__nxsub", "_Param_wfs__npix", "_Param_wfs__pixsize",
            "_Param_wfs__fracsub", "_Param_wfs__xpos", "_Param_wfs__ypos",
            "_Param_wfs__Lambda", "_Param_wfs__dms_seen", "_Param_wfs__fssize",
            "_Param_wfs__fstop", "_Param_wfs__pyr_ampl", "_Param_wfs__pyr_loc",
            "_Param_wfs__pyr_npts", "_Param_wfs__pyrtype", "_Param_wfs__pyr_pup_sep",
            "ndms", "_Param_dm__type", "_Param_dm__alt", "_Param_dm__coupling",
            "_Param_dm__margin_in", "_Param_dm__margin_out", "_Param_dm__nkl",
            "_Param_dm__nact", "_Param_dm__type_kl", "_Param_dm__push4imat",
            "_Param_dm__thresh", "_Param_dm__unitpervolt"
    ]

    # PERF: the commit hash is loop-invariant — run git once, not once per row
    commit = check_output(["git", "rev-parse", "--short", "HEAD"]).strip()

    for i in dataBase.index:
        cc = 0
        # Only rows produced by the same code version and flagged valid can match
        if (dataBase.loc[i, "validity"] and (dataBase.loc[i, "commit"] == commit)):
            cond = True
            while cond and cc < len(param2test):
                cond = dataBase.loc[i, param2test[cc]] == pdict[param2test[cc]]
                if type(cond) is np.ndarray:
                    # Vector-valued parameters: every element must match
                    cond = cond.all()
                cc += 1

            if not cond:
                # For debug: report the first parameter that differs.
                # BUGFIX: drop the extra parentheses that made this print a
                # tuple repr instead of the formatted message the siblings emit
                cc -= 1
                print(param2test[cc] + " has changed from ",
                      dataBase.loc[i, param2test[cc]], " to ", pdict[param2test[cc]])
        else:
            cond = False

        if cond:
            matricesToLoad["index_dms"] = i
            matricesToLoad["dm"] = dataBase.loc[i, "path2file"]
    return
476 
477 
def validDataBase(savepath, matricesToLoad):
    """ Flag as valid, in the database, every matrix type that was computed
    during this run (i.e. that was not loaded from the database).

    :parameters:

        savepath : (str) : path to the data directory

        matricesToLoad : (dictionary) : matrices that were loaded and their path
    """
    store = pandas.HDFStore(savepath + "matricesDataBase.h5")
    for matrix_type in ("A", "dm", "imat"):
        if matrix_type not in matricesToLoad:
            validInStore(store, savepath, matrix_type)
    store.close()
489 
490 
def validFile(filename):
    """ Set the "validity" attribute of an existing h5 file to True.

    :param filename: (str) : path to the h5 file to flag as valid
    """
    h5f = h5py.File(filename, "r+")
    h5f.attrs["validity"] = True
    h5f.close()
497 
498 
def validInStore(store, savepath, matricetype):
    """ Flag the last row of the matricetype table as valid, and flag the
    corresponding h5 file as valid too.

    :parameters:

        store : (pandas.HDFStore) : opened matrices database store

        savepath : (str) : path to the data directory

        matricetype : (str) : type of matrix to validate
    """
    df = store[matricetype]
    last_row = len(df.index) - 1
    df.loc[last_row, "validity"] = True
    store[matricetype] = df
    validFile(df.loc[last_row, "path2file"])
507 
508 
def configFromH5(filename, config):
    """ Read a simulation configuration from the attributes of an hdf5 file
    and fill the config object in place.

    NOTE(review): this function references the name ``ao`` (``ao.Param_wfs()``,
    ``ao.Param_dm()``...) but the corresponding import below is commented out,
    so calling it as-is raises a NameError as soon as a WFS, DM, centroider or
    controller has to be built — confirm the intended import before use.

    :param filename: (str) : path to the hdf5 file to read the header from

    :param config: (module) : configuration object updated in place
    """
    #import shesha as ao

    f = h5py.File(filename, "r")

    # NOTE(review): params_dictionary() stores this key as "simul_name";
    # reading "simulname" here returns None -- confirm which key is intended.
    config.simul_name = str(f.attrs.get("simulname"))
    # Loop
    config.p_loop.set_niter(f.attrs.get("niter"))
    config.p_loop.set_ittime(f.attrs.get("ittime"))

    # geom
    config.p_geom.set_zenithangle(f.attrs.get("zenithangle"))
    config.p_geom.set_pupdiam(f.attrs.get("pupdiam"))

    # Tel
    config.p_tel.set_diam(f.attrs.get("tel_diam"))
    config.p_tel.set_cobs(f.attrs.get("cobs"))
    config.p_tel.set_nbrmissing(f.attrs.get("nbrmissing"))
    config.p_tel.set_t_spiders(f.attrs.get("t_spiders"))
    config.p_tel.set_type_ap(str(f.attrs.get("type_ap")))
    config.p_tel.set_spiders_type(str(f.attrs.get("spiders_type")))
    config.p_tel.set_pupangle(f.attrs.get("pupangle"))
    config.p_tel.set_referr(f.attrs.get("referr"))
    config.p_tel.set_std_piston(f.attrs.get("std_piston"))
    config.p_tel.set_std_tt(f.attrs.get("std_tt"))

    # Atmos
    config.p_atmos.set_r0(f.attrs.get("r0"))
    config.p_atmos.set_nscreens(f.attrs.get("nscreens"))
    config.p_atmos.set_frac(f.attrs.get("frac"))
    config.p_atmos.set_alt(f.attrs.get("atm.alt"))
    config.p_atmos.set_windspeed(f.attrs.get("windspeed"))
    config.p_atmos.set_winddir(f.attrs.get("winddir"))
    config.p_atmos.set_L0(f.attrs.get("L0"))
    config.p_atmos.set_seeds(f.attrs.get("seeds"))

    # Target
    config.p_target.set_nTargets(f.attrs.get("ntargets"))
    config.p_target.set_xpos(f.attrs.get("target.xpos"))
    config.p_target.set_ypos(f.attrs.get("target.ypos"))
    config.p_target.set_Lambda(f.attrs.get("target.Lambda"))
    config.p_target.set_mag(f.attrs.get("target.mag"))
    # -10 is the sentinel written by params_dictionary for None: only set
    # dms_seen when a real value was stored
    if (f.attrs.get("target.dms_seen") > -1):
        config.p_target.set_dms_seen(f.attrs.get("target.dms_seen"))

    # WFS: each attribute holds one value per WFS, indexed by i
    config.p_wfss = []
    for i in range(f.attrs.get("nwfs")):
        config.p_wfss.append(ao.Param_wfs())
        config.p_wfss[i].set_type(str(f.attrs.get("type")[i]))
        config.p_wfss[i].set_nxsub(f.attrs.get("nxsub")[i])
        config.p_wfss[i].set_npix(f.attrs.get("npix")[i])
        config.p_wfss[i].set_pixsize(f.attrs.get("pixsize")[i])
        config.p_wfss[i].set_fracsub(f.attrs.get("fracsub")[i])
        config.p_wfss[i].set_xpos(f.attrs.get("wfs.xpos")[i])
        config.p_wfss[i].set_ypos(f.attrs.get("wfs.ypos")[i])
        config.p_wfss[i].set_Lambda(f.attrs.get("wfs.Lambda")[i])
        config.p_wfss[i].set_gsmag(f.attrs.get("gsmag")[i])
        config.p_wfss[i].set_optthroughput(f.attrs.get("optthroughput")[i])
        config.p_wfss[i].set_zerop(f.attrs.get("zerop")[i])
        config.p_wfss[i].set_noise(f.attrs.get("noise")[i])
        config.p_wfss[i].set_atmos_seen(f.attrs.get("atmos_seen")[i])
        config.p_wfss[i].set_fstop(str(f.attrs.get("fstop")[i]))
        config.p_wfss[i].set_pyr_npts(f.attrs.get("pyr_npts")[i])
        config.p_wfss[i].set_pyr_ampl(f.attrs.get("pyr_ampl")[i])
        config.p_wfss[i].set_pyrtype(str(f.attrs.get("pyrtype")[i]))
        config.p_wfss[i].set_pyr_loc(str(f.attrs.get("pyr_loc")[i]))
        config.p_wfss[i].set_fssize(f.attrs.get("fssize")[i])
        # Skip the -10 sentinel written for None values
        if ((f.attrs.get("dms_seen")[i] > -1).all()):
            config.p_wfss[i].set_dms_seen(f.attrs.get("dms_seen")[i])

        # LGS
        config.p_wfss[i].set_gsalt(f.attrs.get("gsalt")[i])
        config.p_wfss[i].set_lltx(f.attrs.get("lltx")[i])
        config.p_wfss[i].set_llty(f.attrs.get("llty")[i])
        config.p_wfss[i].set_laserpower(f.attrs.get("laserpower")[i])
        config.p_wfss[i].set_lgsreturnperwatt(f.attrs.get("lgsreturnperwatt")[i])
        config.p_wfss[i].set_proftype(str(f.attrs.get("proftype")[i]))
        config.p_wfss[i].set_beamsize(f.attrs.get("beamsize")[i])

    # DMs
    config.p_dms = []
    if (f.attrs.get("ndms")):
        for i in range(f.attrs.get("ndms")):
            config.p_dms.append(ao.Param_dm())
            config.p_dms[i].set_type(str(f.attrs.get("type")[i]))
            config.p_dms[i].set_nact(f.attrs.get("nact")[i])
            config.p_dms[i].set_alt(f.attrs.get("dm.alt")[i])
            config.p_dms[i].set_thresh(f.attrs.get("dm.thresh")[i])
            config.p_dms[i].set_coupling(f.attrs.get("coupling")[i])
            config.p_dms[i].set_unitpervolt(f.attrs.get("unitpervolt")[i])
            config.p_dms[i].set_push4imat(f.attrs.get("push4imat")[i])

    # Centroiders
    config.p_centroiders = []
    if (f.attrs.get("ncentroiders")):
        for i in range(f.attrs.get("ncentroiders")):
            config.p_centroiders.append(ao.Param_centroider())
            config.p_centroiders[i].set_nwfs(f.attrs.get("centro.nwfs")[i])
            config.p_centroiders[i].set_type(str(f.attrs.get("type")[i]))
            config.p_centroiders[i].set_type_fct(str(f.attrs.get("type_fct")[i]))
            config.p_centroiders[i].set_nmax(f.attrs.get("nmax")[i])
            config.p_centroiders[i].set_thresh(f.attrs.get("centroider.thresh")[i])
            if (f.attrs.get("weights")[i]):
                config.p_centroiders[i].set_weights(f.attrs.get("weights")[i])
            config.p_centroiders[i].set_width(f.attrs.get("width")[i])
        config.p_rtc.set_centroiders(config.p_centroiders)

    # Controllers
    config.p_controllers = []
    if (f.attrs.get("ncontrollers")):
        for i in range(f.attrs.get("ncontrollers")):
            config.p_controllers.append(ao.Param_controller())
            config.p_controllers[i].set_type(str(f.attrs.get("type")[i]))
            config.p_controllers[i].set_nwfs(f.attrs.get("control.nwfs")[i])
            config.p_controllers[i].set_ndm(f.attrs.get("ndm")[i])
            config.p_controllers[i].set_maxcond(f.attrs.get("maxcond")[i])
            config.p_controllers[i].set_delay(f.attrs.get("delay")[i])
            config.p_controllers[i].set_gain(f.attrs.get("gain")[i])
            config.p_controllers[i].set_modopti(f.attrs.get("modopti")[i])
            config.p_controllers[i].set_nrec(f.attrs.get("nrec")[i])
            config.p_controllers[i].set_nmodes(f.attrs.get("nmodes")[i])
            config.p_controllers[i].set_gmin(f.attrs.get("gmin")[i])
            config.p_controllers[i].set_gmax(f.attrs.get("gmax")[i])
            config.p_controllers[i].set_ngain(f.attrs.get("ngain")[i])
            config.p_controllers[i].set_TTcond(f.attrs.get("TTcond")[i])
            config.p_controllers[i].set_cured_ndivs(f.attrs.get("cured_ndivs")[i])
        config.p_rtc.set_controllers(config.p_controllers)

    config.p_rtc.set_nwfs(f.attrs.get("nwfs"))

    # NOTE(review): f is never closed here; consider f.close() before returning
    print("Parameters have been read from ", filename, "header")
643 
644 
def writeHdf5SingleDataset(filename, data, datasetName="dataset"):
    """ Write a hdf5 file containing a single field.

    If the file already exists, it will be overwritten.

    :parametres:

        filename: (str) : name of the file to write

        data: (np.ndarray) : content of the file

        datasetName: (str) : name of the dataset to write (default="dataset")
    """
    # Mode "w" truncates any pre-existing file
    h5f = h5py.File(filename, "w")
    h5f.create_dataset(datasetName, data=data)
    h5f.close()
662 
663 
def readHdf5SingleDataset(filename, datasetName="dataset"):
    """ Read a single dataset from an hdf5 file.

    :parameters:

        filename: (str) : name of the file to read from

        datasetName: (str) : name of the dataset to read (default="dataset")

    :return: (np.ndarray) : content of the dataset
    """
    h5f = h5py.File(filename, "r")
    # [:] materializes the dataset as a numpy array before the file is closed
    content = h5f[datasetName][:]
    h5f.close()
    return content
678 
679 
def load_AB_from_dataBase(database, ind):
    """ Read and return A, B, istx and isty from the database

    :parameters:

        database: (dict): dictionary containing paths to matrices to load

        ind: (int): layer index
    """
    print("loading", database["A"])
    h5f = h5py.File(database["A"], 'r')
    # All four datasets for one layer share the same "_<layer>" suffix
    suffix = "_" + str(ind)
    A = h5f["A" + suffix][:]
    B = h5f["B" + suffix][:]
    istx = h5f["istx" + suffix][:]
    isty = h5f["isty" + suffix][:]
    h5f.close()
    return A, B, istx, isty
698 
699 
def save_AB_in_database(k, A, B, istx, isty):
    """ Save A, B, istx and isty in the database

    :parameters:

        k: (int): layer index used to suffix the dataset names

        A: (np.ndarray): A matrix to save

        B: (np.ndarray): B matrix to save

        istx: (np.ndarray): istx to save

        isty: (np.ndarray): isty to save
    """
    commit = check_output(["git", "rev-parse", "--short", "HEAD"]).decode('utf8').strip()
    print("writing files and updating database")
    # The last row of the "A" table is the file registered for this run
    df = pandas.read_hdf(
            os.getenv('SHESHA_ROOT') + "/data/dataBase/matricesDataBase.h5", "A")
    ind = len(df.index) - 1
    savename = os.getenv('SHESHA_ROOT') + "/data/dataBase/turbu/A_" + \
            commit + "_" + str(ind) + ".h5"
    save_hdf5(savename, "A_" + str(k), A)
    save_hdf5(savename, "B_" + str(k), B)
    save_hdf5(savename, "istx_" + str(k), istx)
    save_hdf5(savename, "isty_" + str(k), isty)
726 
727 
def load_dm_geom_from_dataBase(database, ndm):
    """ Read and return the DM geometry

    :parameters:

        database: (dict): dictionary containing paths to matrices to load

        ndm: (int): dm index
    """
    print("loading", database["dm"])
    h5f = h5py.File(database["dm"], 'r')
    # All datasets for one DM share the same "_<dm>" suffix
    suffix = "_" + str(ndm)
    influpos = h5f["influpos" + suffix][:]
    ninflu = h5f["ninflu" + suffix][:]
    influstart = h5f["influstart" + suffix][:]
    i1 = h5f["i1" + suffix][:]
    j1 = h5f["j1" + suffix][:]
    ok = h5f["ok" + suffix][:]
    h5f.close()
    return influpos, ninflu, influstart, i1, j1, ok
748 
749 
def save_dm_geom_in_dataBase(ndm, influpos, ninflu, influstart, i1, j1, ok):
    """ Save the DM geometry in the database

    :parameters:

        ndm: (int): dm index used to suffix the dataset names

        influpos: (np.ndarray): influence function positions

        ninflu: (np.ndarray): number of influence functions

        influstart: (np.ndarray): influence function start indices

        i1: (np.ndarray): i1 indices

        j1: (np.ndarray): j1 indices

        ok: (np.ndarray): valid actuator flags
    """
    commit = check_output(["git", "rev-parse", "--short", "HEAD"]).decode('utf8').strip()
    print("writing files and updating database")
    # The last row of the "dm" table is the file registered for this run
    df = pandas.read_hdf(
            os.getenv('SHESHA_ROOT') + "/data/dataBase/matricesDataBase.h5", "dm")
    ind = len(df.index) - 1
    savename = os.getenv('SHESHA_ROOT') + "/data/dataBase/mat/dm_" + \
            commit + "_" + str(ind) + ".h5"
    save_hdf5(savename, "influpos_" + str(ndm), influpos)
    save_hdf5(savename, "ninflu_" + str(ndm), ninflu)
    save_hdf5(savename, "influstart_" + str(ndm), influstart)
    save_hdf5(savename, "i1_" + str(ndm), i1)
    save_hdf5(savename, "j1_" + str(ndm), j1)
    save_hdf5(savename, "ok_" + str(ndm), ok)
780 
781 
def load_imat_from_dataBase(database):
    """ Read and return the imat

    :parameters:

        database: (dict): dictionary containing paths to matrices to load
    """
    print("loading", database["imat"])
    h5f = h5py.File(database["imat"], 'r')
    # [:] materializes the dataset as a numpy array before the file is closed
    imat = h5f["imat"][:]
    h5f.close()
    return imat
795 
796 
def save_imat_in_dataBase(imat):
    """ Save the imat in the database

    :parameters:

        imat: (np.ndarray): imat to save
    """
    commit = check_output(["git", "rev-parse", "--short", "HEAD"]).decode('utf8').strip()
    print("writing files and updating database")
    # The last row of the "imat" table is the file registered for this run
    df = pandas.read_hdf(
            os.getenv('SHESHA_ROOT') + "/data/dataBase/matricesDataBase.h5", "imat")
    ind = len(df.index) - 1
    savename = os.getenv('SHESHA_ROOT') + "/data/dataBase/mat/imat_" + \
            commit + "_" + str(ind) + ".h5"
    save_hdf5(savename, "imat", imat)
shesha.util.hdf5_util.load_AB_from_dataBase
def load_AB_from_dataBase(database, ind)
Read and return A, B, istx and isty from the database.
Definition: hdf5_util.py:688
shesha.util.hdf5_util.updateDataBase
def updateDataBase(h5file, savepath, matrix_type)
Update the database adding a new row to the matrix_type database.
Definition: hdf5_util.py:209
shesha.util.hdf5_util.init_hdf5_files
def init_hdf5_files(savepath, param_dict, matricesToLoad)
TODO docstring.
Definition: hdf5_util.py:150
shesha.util.hdf5_util.validDataBase
def validDataBase(savepath, matricesToLoad)
TODO docstring.
Definition: hdf5_util.py:480
shesha.util.hdf5_util.save_imat_in_dataBase
def save_imat_in_dataBase(imat)
Save the DM geometry in the database.
Definition: hdf5_util.py:803
shesha.util.hdf5_util.readHdf5SingleDataset
def readHdf5SingleDataset(filename, datasetName="dataset")
Read a single dataset from an hdf5 file.
Definition: hdf5_util.py:672
shesha.util.hdf5_util.save_AB_in_database
def save_AB_in_database(k, A, B, istx, isty)
Save A, B, istx and isty in the database.
Definition: hdf5_util.py:714
shesha.util.hdf5_util.params_dictionary
def params_dictionary(config)
Create and returns a dictionary of all the config parameters with the corresponding keys for further ...
Definition: hdf5_util.py:86
shesha.util.hdf5_util.save_h5
def save_h5(filename, dataname, config, data)
save_hdf5(filename, dataname, config, data) Create a hdf5 file and store data in it with full header ...
Definition: hdf5_util.py:257
shesha.util.hdf5_util.save_dm_geom_in_dataBase
def save_dm_geom_in_dataBase(ndm, influpos, ninflu, influstart, i1, j1, ok)
Save the DM geometry in the database.
Definition: hdf5_util.py:766
shesha.util.hdf5_util.load_imat_from_dataBase
def load_imat_from_dataBase(database)
Read and return the imat.
Definition: hdf5_util.py:788
shesha.util.hdf5_util.writeHdf5SingleDataset
def writeHdf5SingleDataset(filename, data, datasetName="dataset")
Write a hdf5 file containing a single field.
Definition: hdf5_util.py:657
shesha.util.hdf5_util.configFromH5
def configFromH5(filename, config)
TODO docstring.
Definition: hdf5_util.py:511
shesha.util.hdf5_util.checkMatricesDataBase
def checkMatricesDataBase(savepath, config, param_dict)
Check in the database if the current config have been already run.
Definition: hdf5_util.py:282
shesha.util.hdf5_util.updateParamDict
def updateParamDict(pdict, pClass, prefix)
Definition: hdf5_util.py:49
shesha.util.hdf5_util.checkControlParams
def checkControlParams(savepath, config, pdict, matricesToLoad)
Compare the current controller parameters to the database.
Definition: hdf5_util.py:366
shesha.util.hdf5_util.initDataBase
def initDataBase(savepath, param_dict)
Initialize and create the database for all the saved matrices.
Definition: hdf5_util.py:184
shesha.util.hdf5_util.checkTurbuParams
def checkTurbuParams(savepath, config, pdict, matricesToLoad)
Compare the current turbulence parameters to the database.
Definition: hdf5_util.py:309
shesha.util.hdf5_util.save_hdf5
def save_hdf5(filename, dataname, data)
save_hdf5(filename, dataname, data) Create a dataset in an existing hdf5 file filename and store data...
Definition: hdf5_util.py:237
shesha.util.hdf5_util.checkDmsParams
def checkDmsParams(savepath, config, pdict, matricesToLoad)
Compare the current controller parameters to the database.
Definition: hdf5_util.py:430
shesha.util.hdf5_util.validFile
def validFile(filename)
TODO docstring.
Definition: hdf5_util.py:493
shesha.util.hdf5_util.load_dm_geom_from_dataBase
def load_dm_geom_from_dataBase(database, ndm)
Read and return the DM geometry.
Definition: hdf5_util.py:736
shesha.util.hdf5_util.validInStore
def validInStore(store, savepath, matricetype)
TODO docstring.
Definition: hdf5_util.py:501
shesha.util.hdf5_util.create_file_attributes
def create_file_attributes(filename, param_dict)
create_file_attributes(filename,config) Create an hdf5 file with attributes corresponding to all simu...
Definition: hdf5_util.py:130