"""COMPASS 5.4.4
End-to-end AO simulation tool using GPU acceleration.

hdf5_util.py: utilities to save/load simulation matrices and parameters
in HDF5 files, backed by a pandas matrices database.
"""
38 import h5py
39 import pandas
40 import os
41 import numpy as np
42 from subprocess import check_output
43 
44 
def updateParamDict(pdict, pClass, prefix):
    """ Update the parameter dictionary pdict with all attributes of pClass.

    If pClass is a list of parameter objects, each key maps to the list of
    the corresponding attribute values (one per object). String values are
    encoded to UTF-8 bytes so they can be stored as HDF5 attributes.

    :param pdict: (dict) : dictionary updated in place
    :param pClass: (object or list) : parameter class instance(s) to read
    :param prefix: (str) : key prefix (currently unused -- presumably the
                           attribute names are already name-mangled with the
                           class prefix; TODO confirm)
    """

    def _to_h5(value):
        # HDF5 attributes cannot hold Python unicode strings directly
        return value.encode("utf8") if isinstance(value, str) else value

    if isinstance(pClass, list):
        for key in pClass[0].__dict__.keys():
            pdict[key] = [_to_h5(obj.__dict__[key]) for obj in pClass]
    else:
        for key in pClass.__dict__.keys():
            pdict[key] = _to_h5(pClass.__dict__[key])
69 
70 
def params_dictionary(config):
    """ Create and return a dictionary of all the config parameters with the
    corresponding keys for further creation of database and save files.

    :param config: (module) : simulation parameters

    :return param_dict: (dictionary) : dictionary of parameters
    """
    # NOTE(review): commit is kept as raw bytes (no .decode()) so that later
    # comparisons against the stored "commit" attribute keep working -- confirm
    # before changing.
    commit = check_output(["git", "rev-parse", "--short", "HEAD"]).strip()

    param_dict = {"simul_name": config.simul_name.encode('utf8'), "commit": commit}

    # mandatory parameter groups
    updateParamDict(param_dict, config.p_loop, "_Param_loop__")
    updateParamDict(param_dict, config.p_geom, "_Param_geom__")
    updateParamDict(param_dict, config.p_tel, "_Param_tel__")

    # optional parameter groups: (object, mangling prefix, count key or None)
    optional_groups = [
        (config.p_atmos, "_Param_atmos__", None),
        (config.p_targets, "_Param_target__", "ntargets"),
        (config.p_wfss, "_Param_wfs__", "nwfs"),
        (config.p_dms, "_Param_dm__", "ndms"),
        (config.p_controllers, "_Param_controller__", "ncontrollers"),
        (config.p_centroiders, "_Param_centroider__", "ncentroiders"),
    ]
    for pobj, prefix, count_key in optional_groups:
        if pobj is not None:
            updateParamDict(param_dict, pobj, prefix)
            if count_key is not None:
                param_dict[count_key] = len(pobj)

    # replace None by the -10 sentinel (HDF5 attributes cannot store None)
    for key in param_dict.keys():
        value = param_dict[key]
        if type(value) is list:
            param_dict[key] = [-10 if item is None else item for item in value]
        elif value is None:
            param_dict[key] = -10
    return param_dict
111 
112 
def create_file_attributes(filename, param_dict):
    """ Create an hdf5 file with attributes corresponding to all simulation
    parameters.

    Numpy arrays (and lists of arrays) are written as datasets instead of
    attributes; every other value is stored as a file attribute. A "validity"
    attribute is initialized to False and flipped to True later by validFile().

    :param filename: (str) : full path + filename to create
    :param param_dict: (dictionary) : simulation parameters to store
    """
    # NOTE(review): save_hdf5() reopens this same file in "r+" mode while f is
    # still open in "w" mode -- relies on HDF5 allowing concurrent same-process
    # handles; confirm with the h5py/libhdf5 versions in use.
    f = h5py.File(filename, "w")

    for i in list(param_dict.keys()):
        # HDF5 attributes require bytes, not unicode strings
        if (isinstance(param_dict[i], str)):
            attr = param_dict[i].encode("utf-8")
        elif (isinstance(param_dict[i], list)):
            attr = [
                    s.encode("utf-8") if isinstance(s, str) else s for s in param_dict[i]
            ]
        else:
            attr = param_dict[i]
        if (isinstance(attr, np.ndarray)):
            # arrays may be too large for attributes: store as datasets
            save_hdf5(filename, i, attr)
        elif (isinstance(attr, list)):
            # guard against empty lists: attr[0] would raise IndexError
            if (len(attr) > 0 and isinstance(attr[0], np.ndarray)):
                # one dataset per array, suffixed with its index in the list
                for k, data in enumerate(attr):
                    save_hdf5(filename, i + str(k), data)
            else:
                f.attrs.create(i, attr)
        else:
            f.attrs.create(i, attr)
    f.attrs.create("validity", False)
    print(filename, "initialized")
    f.close()
147 
148 
def init_hdf5_files(savepath, param_dict, matricesToLoad):
    """ Create a fresh save file (with full parameter header) for every matrix
    type that was NOT found in the database, and register it as a new row.

    :param savepath: (str) : path to the data directory
    :param param_dict: (dictionary) : parameters dictionary
    :param matricesToLoad: (dictionary) : matrices that will be loaded and their path
    """
    commit = check_output(["git", "rev-parse", "--short", "HEAD"]).decode('utf8').strip()
    # (matrix type, sub-directory prefix of the save file)
    targets = [("A", "turbu/A_"), ("dm", "mat/dm_"), ("imat", "mat/imat_")]
    for matrix_type, prefix in targets:
        if matrix_type in matricesToLoad:
            continue
        df = pandas.read_hdf(savepath + "matricesDataBase.h5", matrix_type)
        ind = len(df.index)
        filename = savepath + prefix + commit + "_" + str(ind) + ".h5"
        create_file_attributes(filename, param_dict)
        updateDataBase(filename, savepath, matrix_type)
174 
175 
def initDataBase(savepath, param_dict):
    """ Initialize and create the database for all the saved matrices. This
    database is placed at the top of the savepath and named matricesDataBase.h5.

    Args:

        savepath : (str) : path to the data repertory

        param_dict : (dictionary) : parameters dictionary
    """
    # one column per parameter, plus bookkeeping columns
    columns = list(param_dict.keys()) + ["path2file", "validity"]
    empty_frame = pandas.DataFrame(columns=columns)
    store = pandas.HDFStore(savepath + "matricesDataBase.h5")
    for table in ("A", "imat", "dm"):
        store.put(table, empty_frame)
    store.close()
    print("Matrices database created")
196 
197 
def updateDataBase(h5file, savepath, matrix_type):
    """ Update the database by appending a new row to the matrix_type table,
    built from the attributes of the given h5 file.

    Args:

        h5file : (str) : path to the new h5 file to add

        savepath : (str) : path to the data directory

        matrix_type : (str) : type of matrix to store ("A", "imat" or "dm")

    :raises ValueError: if matrix_type is not one of the known tables
    """
    # guard clause instead of wrapping the whole body in an if
    if matrix_type not in ("A", "imat", "dm"):
        raise ValueError("Wrong matrix_type specified. See documentation")

    f = h5py.File(h5file, "r")
    store = pandas.HDFStore(savepath + "matricesDataBase.h5")
    df = store[matrix_type]
    row = len(df.index)
    # copy every file attribute into the new row
    for attr_name in list(f.attrs.keys()):
        df.loc[row, attr_name] = f.attrs[attr_name]
    df.loc[row, "path2file"] = h5file
    # new entries start invalid; validDataBase() flips this after the run
    df.loc[row, "validity"] = False
    store.put(matrix_type, df)
    store.close()
    f.close()
225 
226 
def save_hdf5(filename, dataname, data):
    """ Create a dataset in an existing hdf5 file and store data in it.

    :param filename: (str) : full path to the file

    :param dataname: (str) : name of the data (imat, cmat...)

    :param data: np.array : data to save
    """
    # context manager guarantees the file is closed even on error
    with h5py.File(filename, "r+") as f:
        f.create_dataset(dataname, data=data)
242 
243 
def save_h5(filename, dataname, config, data):
    """ Create a hdf5 file and store data in it with a full header built from
    the config parameters. Useful to backtrace data origins.

    :param filename: (str) : full path to the file

    :param dataname: (str) : name of the data (imat, cmat...)

    :param config: (module) : config parameters

    :param data: np.array : data to save
    """
    create_file_attributes(filename, params_dictionary(config))
    save_hdf5(filename, dataname, data)
    print(filename, "has been written")
263 
264 
def checkMatricesDataBase(savepath, config, param_dict):
    """ Check in the database if the current config has already been run. If so,
    return a dictionary containing the matrices to load and their path. Matrices
    which don't appear in the dictionary will be computed, stored and added
    to the database during the simulation.
    If the database doesn't exist, this function creates it.

    Args:

        savepath : (str) : path to the data repertory

        config : (module) : simulation parameters

        param_dict : (dictionary) : parameters dictionary

    :return matricesToLoad: (dictionary) : matrices that will be loaded and their path
    """
    matricesToLoad = {}
    database_exists = os.path.exists(savepath + "matricesDataBase.h5")
    if not database_exists:
        initDataBase(savepath, param_dict)
    else:
        checkTurbuParams(savepath, config, param_dict, matricesToLoad)
        checkDmsParams(savepath, config, param_dict, matricesToLoad)
        # only look for controller matrices when the dm matrices matched
        if "dm" in matricesToLoad:
            checkControlParams(savepath, config, param_dict, matricesToLoad)

    init_hdf5_files(savepath, param_dict, matricesToLoad)
    return matricesToLoad
297 
298 
def checkTurbuParams(savepath, config, pdict, matricesToLoad):
    """ Compare the current turbulence parameters to the database. If similar
    parameters are found, the matricesToLoad dictionary is completed.
    Since all the turbulence matrices are computed together, we only check the
    parameters for the A matrix: if we load A, we load B, istx and isty too.

    Args:

        savepath : (str) : path to the data directory

        config : (module) : simulation parameters

        pdict : (dictionary) : parameters dictionary of the current run

        matricesToLoad : (dictionary) : matrices that will be loaded and their path
    """
    dataBase = pandas.read_hdf(savepath + "matricesDataBase.h5", "A")
    param2test = [
            "_Param_atmos__r0", "_Param_atmos__seeds", "_Param_atmos__L0",
            "_Param_atmos__alt", "_Param_tel__diam", "_Param_tel__cobs",
            "_Param_geom__pupdiam", "_Param_geom__zenithangle", "_Param_target__xpos",
            "_Param_target__ypos", "_Param_wfs__xpos", "_Param_wfs__ypos"
    ]

    # loop-invariant: hoisted out of the row loop (one subprocess call instead
    # of one per database row). Kept as bytes -- presumably matches how the
    # "commit" attribute reads back; TODO confirm with the h5py version in use.
    commit = check_output(["git", "rev-parse", "--short", "HEAD"]).strip()
    for i in dataBase.index:
        cc = 0
        if (dataBase.loc[i, "validity"] and (dataBase.loc[i, "commit"] == commit)):
            cond = True
            # compare each tested parameter until one differs
            while cond and cc < len(param2test):
                cond = dataBase.loc[i, param2test[cc]] == pdict[param2test[cc]]
                if type(cond) is np.ndarray:
                    cond = cond.all()
                cc += 1
            # For debug
            if not cond:
                cc -= 1
                print(param2test[cc] + " has changed from ",
                      dataBase.loc[i, param2test[cc]], " to ", pdict[param2test[cc]])
        else:
            cond = False

        if (cond):
            matricesToLoad["index_turbu"] = i
            matricesToLoad["A"] = dataBase.loc[i, "path2file"]
            return
354 
355 
def checkControlParams(savepath, config, pdict, matricesToLoad):
    """ Compare the current controller parameters to the database. If similar
    parameters are found, matricesToLoad dictionary is completed.
    Since all the controller matrices are computed together, we only check the
    parameters for the imat matrix: if we load imat, we load eigenv and U too.

    Args:

        savepath : (str) : path to the data directory

        config : (module) : simulation parameters

        pdict : (dictionary) : parameters dictionary of the current run

        matricesToLoad : (dictionary) : matrices that will be loaded and their path
    """
    dataBase = pandas.read_hdf(savepath + "matricesDataBase.h5", "imat")

    param2test = [
            "_Param_tel__diam", "_Param_tel__t_spiders", "_Param_tel__spiders_type",
            "_Param_tel__pupangle", "_Param_tel__referr", "_Param_tel__std_piston",
            "_Param_tel__std_tt", "_Param_tel__type_ap", "_Param_tel__nbrmissing",
            "_Param_tel__cobs", "_Param_geom__pupdiam", "nwfs", "_Param_wfs__type",
            "_Param_wfs__nxsub", "_Param_wfs__npix", "_Param_wfs__pixsize",
            "_Param_wfs__fracsub", "_Param_wfs__xpos", "_Param_wfs__ypos",
            "_Param_wfs__Lambda", "_Param_wfs__dms_seen", "_Param_wfs__fssize",
            "_Param_wfs__fstop", "_Param_wfs__pyr_ampl", "_Param_wfs__pyr_loc",
            "_Param_wfs__pyr_npts", "_Param_wfs__pyr_pup_sep", "_Param_wfs__pyrtype",
            "ndms", "_Param_dm__type", "_Param_dm__alt", "_Param_dm__coupling",
            "_Param_dm__margin_in", "_Param_dm__margin_out", "_Param_dm__nact",
            "_Param_dm__nkl", "_Param_dm__type_kl", "_Param_dm__push4imat",
            "_Param_dm__thresh", "_Param_dm__unitpervolt", "ncentroiders",
            "_Param_centroider__type", "_Param_centroider__nmax",
            "_Param_centroider__nwfs", "_Param_centroider__sizex",
            "_Param_centroider__sizey", "_Param_centroider__thresh",
            "_Param_centroider__type_fct", "_Param_centroider__weights",
            "_Param_centroider__width"
    ]

    # loop-invariant: hoisted out of the row loop (one subprocess call instead
    # of one per database row)
    commit = check_output(["git", "rev-parse", "--short", "HEAD"]).strip()
    for i in dataBase.index:
        cc = 0
        if (dataBase.loc[i, "validity"] and (dataBase.loc[i, "commit"] == commit)):
            cond = True
            # compare each tested parameter until one differs
            while cond and cc < len(param2test):
                cond = dataBase.loc[i, param2test[cc]] == pdict[param2test[cc]]
                if type(cond) is np.ndarray:
                    cond = cond.all()
                cc += 1
            # For debug
            if not cond:
                cc -= 1
                print(param2test[cc] + " has changed from ",
                      dataBase.loc[i, param2test[cc]], " to ", pdict[param2test[cc]])
        else:
            cond = False

        if (cond):
            matricesToLoad["index_control"] = i
            matricesToLoad["imat"] = dataBase.loc[i, "path2file"]
            return
418 
419 
def checkDmsParams(savepath, config, pdict, matricesToLoad):
    """ Compare the current dm parameters to the database. If similar
    parameters are found, matricesToLoad dictionary is completed.
    Since all the dms matrices are computed together, we only check the
    parameters for the pztok matrix: if we load pztok, we load pztnok too.

    Args:

        savepath : (str) : path to the data directory

        config : (module) : simulation parameters

        pdict : (dictionary) : parameters dictionary of the current run

        matricesToLoad : (dictionary) : matrices that will be loaded and their path
    """
    dataBase = pandas.read_hdf(savepath + "matricesDataBase.h5", "dm")

    param2test = [
            "_Param_tel__diam", "_Param_tel__t_spiders", "_Param_tel__spiders_type",
            "_Param_tel__pupangle", "_Param_tel__referr", "_Param_tel__std_piston",
            "_Param_tel__std_tt", "_Param_tel__type_ap", "_Param_tel__nbrmissing",
            "_Param_tel__cobs", "_Param_geom__pupdiam", "nwfs", "_Param_wfs__type",
            "_Param_wfs__nxsub", "_Param_wfs__npix", "_Param_wfs__pixsize",
            "_Param_wfs__fracsub", "_Param_wfs__xpos", "_Param_wfs__ypos",
            "_Param_wfs__Lambda", "_Param_wfs__dms_seen", "_Param_wfs__fssize",
            "_Param_wfs__fstop", "_Param_wfs__pyr_ampl", "_Param_wfs__pyr_loc",
            "_Param_wfs__pyr_npts", "_Param_wfs__pyrtype", "_Param_wfs__pyr_pup_sep",
            "ndms", "_Param_dm__type", "_Param_dm__alt", "_Param_dm__coupling",
            "_Param_dm__margin_in", "_Param_dm__margin_out", "_Param_dm__nkl",
            "_Param_dm__nact", "_Param_dm__type_kl", "_Param_dm__push4imat",
            "_Param_dm__thresh", "_Param_dm__unitpervolt"
    ]

    # loop-invariant: hoisted out of the row loop (one subprocess call instead
    # of one per database row)
    commit = check_output(["git", "rev-parse", "--short", "HEAD"]).strip()
    for i in dataBase.index:
        cc = 0
        if (dataBase.loc[i, "validity"] and (dataBase.loc[i, "commit"] == commit)):
            cond = True
            # compare each tested parameter until one differs
            while cond and cc < len(param2test):
                cond = dataBase.loc[i, param2test[cc]] == pdict[param2test[cc]]
                if type(cond) is np.ndarray:
                    cond = cond.all()
                cc += 1
            # For debug
            if not cond:
                cc -= 1
                # multi-arg print (not a tuple) for consistency with the
                # other two checkers' debug output
                print(param2test[cc] + " has changed from ",
                      dataBase.loc[i, param2test[cc]], " to ", pdict[param2test[cc]])
        else:
            cond = False

        if (cond):
            matricesToLoad["index_dms"] = i
            matricesToLoad["dm"] = dataBase.loc[i, "path2file"]
            return
477 
478 
def validDataBase(savepath, matricesToLoad):
    """ Flag the latest database row of each matrix type that was computed
    during this run (i.e. not loaded from the database) as valid.

    :param savepath: (str) : path to the data directory
    :param matricesToLoad: (dictionary) : matrices that were loaded and their path
    """
    store = pandas.HDFStore(savepath + "matricesDataBase.h5")
    for matrix_type in ("A", "dm", "imat"):
        # a type present in matricesToLoad was reused, not recomputed
        if matrix_type not in matricesToLoad:
            validInStore(store, savepath, matrix_type)
    store.close()
490 
491 
def validFile(filename):
    """ Set the "validity" attribute of the given HDF5 file to True.

    :param filename: (str) : full path to the file to flag
    """
    with h5py.File(filename, "r+") as f:
        f.attrs["validity"] = True
498 
499 
def validInStore(store, savepath, matricetype):
    """ Flag the last row of the matricetype table as valid, both in the
    database and in the corresponding h5 file.

    :param store: (pandas.HDFStore) : open database store
    :param savepath: (str) : path to the data directory (unused here)
    :param matricetype: (str) : table name ("A", "dm" or "imat")
    """
    df = store[matricetype]
    last = len(df.index) - 1
    df.loc[last, "validity"] = True
    store[matricetype] = df
    validFile(df.loc[last, "path2file"])
508 
509 
def configFromH5(filename, config):
    """ Restore the simulation parameters stored in the header (attributes)
    of an HDF5 file into the config module, in place.

    :param filename: (str) : path to the HDF5 file to read
    :param config: (module) : simulation parameters module to fill
    """
    #import shesha as ao

    f = h5py.File(filename, "r")

    # NOTE(review): params_dictionary() writes the key as "simul_name";
    # reading "simulname" here returns None -- confirm which key is intended.
    config.simul_name = str(f.attrs.get("simulname"))
    # Loop
    config.p_loop.set_niter(f.attrs.get("niter"))
    config.p_loop.set_ittime(f.attrs.get("ittime"))

    # geom
    config.p_geom.set_zenithangle(f.attrs.get("zenithangle"))
    config.p_geom.set_pupdiam(f.attrs.get("pupdiam"))

    # Tel
    config.p_tel.set_diam(f.attrs.get("tel_diam"))
    config.p_tel.set_cobs(f.attrs.get("cobs"))
    config.p_tel.set_nbrmissing(f.attrs.get("nbrmissing"))
    config.p_tel.set_t_spiders(f.attrs.get("t_spiders"))
    config.p_tel.set_type_ap(str(f.attrs.get("type_ap")))
    config.p_tel.set_spiders_type(str(f.attrs.get("spiders_type")))
    config.p_tel.set_pupangle(f.attrs.get("pupangle"))
    config.p_tel.set_referr(f.attrs.get("referr"))
    config.p_tel.set_std_piston(f.attrs.get("std_piston"))
    config.p_tel.set_std_tt(f.attrs.get("std_tt"))

    # Atmos
    config.p_atmos.set_r0(f.attrs.get("r0"))
    config.p_atmos.set_nscreens(f.attrs.get("nscreens"))
    config.p_atmos.set_frac(f.attrs.get("frac"))
    config.p_atmos.set_alt(f.attrs.get("atm.alt"))
    config.p_atmos.set_windspeed(f.attrs.get("windspeed"))
    config.p_atmos.set_winddir(f.attrs.get("winddir"))
    config.p_atmos.set_L0(f.attrs.get("L0"))
    config.p_atmos.set_seeds(f.attrs.get("seeds"))

    # Target
    config.p_target.set_nTargets(f.attrs.get("ntargets"))
    config.p_target.set_xpos(f.attrs.get("target.xpos"))
    config.p_target.set_ypos(f.attrs.get("target.ypos"))
    config.p_target.set_Lambda(f.attrs.get("target.Lambda"))
    config.p_target.set_mag(f.attrs.get("target.mag"))
    # -10 is the sentinel written for None values; > -1 means "was set"
    if (f.attrs.get("target.dms_seen") > -1):
        config.p_target.set_dms_seen(f.attrs.get("target.dms_seen"))

    # WFS
    config.p_wfss = []
    for i in range(f.attrs.get("nwfs")):
        config.p_wfss.append(config.Param_wfs())
        config.p_wfss[i].set_type(str(f.attrs.get("type")[i]))
        config.p_wfss[i].set_nxsub(f.attrs.get("nxsub")[i])
        config.p_wfss[i].set_npix(f.attrs.get("npix")[i])
        config.p_wfss[i].set_pixsize(f.attrs.get("pixsize")[i])
        config.p_wfss[i].set_fracsub(f.attrs.get("fracsub")[i])
        config.p_wfss[i].set_xpos(f.attrs.get("wfs.xpos")[i])
        config.p_wfss[i].set_ypos(f.attrs.get("wfs.ypos")[i])
        config.p_wfss[i].set_Lambda(f.attrs.get("wfs.Lambda")[i])
        config.p_wfss[i].set_gsmag(f.attrs.get("gsmag")[i])
        config.p_wfss[i].set_optthroughput(f.attrs.get("optthroughput")[i])
        config.p_wfss[i].set_zerop(f.attrs.get("zerop")[i])
        config.p_wfss[i].set_noise(f.attrs.get("noise")[i])
        config.p_wfss[i].set_atmos_seen(f.attrs.get("atmos_seen")[i])
        config.p_wfss[i].set_fstop(str(f.attrs.get("fstop")[i]))
        config.p_wfss[i].set_pyr_npts(f.attrs.get("pyr_npts")[i])
        config.p_wfss[i].set_pyr_ampl(f.attrs.get("pyr_ampl")[i])
        config.p_wfss[i].set_pyrtype(str(f.attrs.get("pyrtype")[i]))
        config.p_wfss[i].set_pyr_loc(str(f.attrs.get("pyr_loc")[i]))
        config.p_wfss[i].set_fssize(f.attrs.get("fssize")[i])
        # only restore dms_seen when every entry was actually set (> -1)
        if ((f.attrs.get("dms_seen")[i] > -1).all()):
            config.p_wfss[i].set_dms_seen(f.attrs.get("dms_seen")[i])

        # LGS
        config.p_wfss[i].set_gsalt(f.attrs.get("gsalt")[i])
        config.p_wfss[i].set_lltx(f.attrs.get("lltx")[i])
        config.p_wfss[i].set_llty(f.attrs.get("llty")[i])
        config.p_wfss[i].set_laserpower(f.attrs.get("laserpower")[i])
        config.p_wfss[i].set_lgsreturnperwatt(f.attrs.get("lgsreturnperwatt")[i])
        config.p_wfss[i].set_proftype(str(f.attrs.get("proftype")[i]))
        config.p_wfss[i].set_beamsize(f.attrs.get("beamsize")[i])

    # DMs
    config.p_dms = []
    if (f.attrs.get("ndms")):
        for i in range(f.attrs.get("ndms")):
            config.p_dms.append(config.Param_dm())
            config.p_dms[i].set_type(str(f.attrs.get("type")[i]))
            config.p_dms[i].set_nact(f.attrs.get("nact")[i])
            config.p_dms[i].set_alt(f.attrs.get("dm.alt")[i])
            config.p_dms[i].set_thresh(f.attrs.get("dm.thresh")[i])
            config.p_dms[i].set_coupling(f.attrs.get("coupling")[i])
            config.p_dms[i].set_unitpervolt(f.attrs.get("unitpervolt")[i])
            config.p_dms[i].set_push4imat(f.attrs.get("push4imat")[i])

    # Centroiders
    config.p_centroiders = []
    if (f.attrs.get("ncentroiders")):
        for i in range(f.attrs.get("ncentroiders")):
            config.p_centroiders.append(config.Param_centroider())
            config.p_centroiders[i].set_nwfs(f.attrs.get("centro.nwfs")[i])
            config.p_centroiders[i].set_type(str(f.attrs.get("type")[i]))
            config.p_centroiders[i].set_type_fct(str(f.attrs.get("type_fct")[i]))
            config.p_centroiders[i].set_nmax(f.attrs.get("nmax")[i])
            config.p_centroiders[i].set_thresh(f.attrs.get("centroider.thresh")[i])
            if (f.attrs.get("weights")[i]):
                config.p_centroiders[i].set_weights(f.attrs.get("weights")[i])
            config.p_centroiders[i].set_width(f.attrs.get("width")[i])
        config.p_rtc.set_centroiders(config.p_centroiders)

    # Controllers
    config.p_controllers = []
    if (f.attrs.get("ncontrollers")):
        for i in range(f.attrs.get("ncontrollers")):
            config.p_controllers.append(config.Param_controller())
            config.p_controllers[i].set_type(str(f.attrs.get("type")[i]))
            config.p_controllers[i].set_nwfs(f.attrs.get("control.nwfs")[i])
            config.p_controllers[i].set_ndm(f.attrs.get("ndm")[i])
            config.p_controllers[i].set_maxcond(f.attrs.get("maxcond")[i])
            config.p_controllers[i].set_delay(f.attrs.get("delay")[i])
            config.p_controllers[i].set_gain(f.attrs.get("gain")[i])
            config.p_controllers[i].set_modopti(f.attrs.get("modopti")[i])
            config.p_controllers[i].set_nrec(f.attrs.get("nrec")[i])
            config.p_controllers[i].set_nmodes(f.attrs.get("nmodes")[i])
            config.p_controllers[i].set_gmin(f.attrs.get("gmin")[i])
            config.p_controllers[i].set_gmax(f.attrs.get("gmax")[i])
            config.p_controllers[i].set_ngain(f.attrs.get("ngain")[i])
            config.p_controllers[i].set_TTcond(f.attrs.get("TTcond")[i])
            config.p_controllers[i].set_cured_ndivs(f.attrs.get("cured_ndivs")[i])
        config.p_rtc.set_controllers(config.p_controllers)

    config.p_rtc.set_nwfs(f.attrs.get("nwfs"))

    print("Parameters have been read from ", filename, "header")
644 
645 
def writeHdf5SingleDataset(filename, data, datasetName="dataset"):
    """ Write a hdf5 file containing a single dataset.

    If the file already exists, it will be overwritten.

    :param filename: (str) : name of the file to write

    :param data: (np.ndarray) : content of the file

    :param datasetName: (str) : name of the dataset to write (default="dataset")
    """
    with h5py.File(filename, "w") as f:
        f.create_dataset(datasetName, data=data)
663 
664 
def readHdf5SingleDataset(filename, datasetName="dataset"):
    """ Read a single dataset from an hdf5 file.

    :param filename: (str) : name of the file to read from

    :param datasetName: (str) : name of the dataset to read (default="dataset")

    :return: (np.ndarray) : the dataset content
    """
    with h5py.File(filename, "r") as f:
        return f[datasetName][:]
679 
680 
def load_AB_from_dataBase(database, ind):
    """ Read and return A, B, istx and isty from the database.

    Args:

        database: (dict): dictionary containing paths to matrices to load

        ind: (int): layer index

    :return: (tuple) : A, B, istx, isty arrays for that layer
    """
    print("loading", database["A"])
    suffix = "_" + str(ind)
    with h5py.File(database["A"], 'r') as f:
        A = f["A" + suffix][:]
        B = f["B" + suffix][:]
        istx = f["istx" + suffix][:]
        isty = f["isty" + suffix][:]
    return A, B, istx, isty
699 
700 
def save_AB_in_database(k, A, B, istx, isty):
    """ Save A, B, istx and isty in the database

    Args:

        k: (int): layer index, used as dataset name suffix

        A: (np.ndarray): A matrix, saved as dataset "A_<k>"

        B: (np.ndarray): B matrix, saved as dataset "B_<k>"

        istx: (np.ndarray): saved as dataset "istx_<k>"

        isty: (np.ndarray): saved as dataset "isty_<k>"
    """
    commit = check_output(["git", "rev-parse", "--short", "HEAD"]).decode('utf8').strip()
    print("writing files and updating database")
    # the file name index is the last row of the "A" table of the database
    df = pandas.read_hdf(
            os.getenv('SHESHA_ROOT') + "/data/dataBase/matricesDataBase.h5", "A")
    ind = len(df.index) - 1
    savename = os.getenv('SHESHA_ROOT') + "/data/dataBase/turbu/A_" + \
            commit + "_" + str(ind) + ".h5"
    save_hdf5(savename, "A_" + str(k), A)
    save_hdf5(savename, "B_" + str(k), B)
    save_hdf5(savename, "istx_" + str(k), istx)
    save_hdf5(savename, "isty_" + str(k), isty)
727 
728 
def load_dm_geom_from_dataBase(database, ndm):
    """ Read and return the DM geometry from the database file.

    Args:

        database: (dict): dictionary containing paths to matrices to load

        ndm: (int): dm index

    :return: (tuple) : influpos, ninflu, influstart, i1, j1, ok arrays
    """
    print("loading", database["dm"])
    names = ("influpos", "ninflu", "influstart", "i1", "j1", "ok")
    with h5py.File(database["dm"], 'r') as f:
        influpos, ninflu, influstart, i1, j1, ok = (
                f[name + "_" + str(ndm)][:] for name in names)
    return influpos, ninflu, influstart, i1, j1, ok
749 
750 
def save_dm_geom_in_dataBase(ndm, influpos, ninflu, influstart, i1, j1, ok):
    """ Save the DM geometry in the database

    Args:

        ndm: (int): dm index, used as dataset name suffix

        influpos: (np.ndarray): saved as dataset "influpos_<ndm>"

        ninflu: (np.ndarray): saved as dataset "ninflu_<ndm>"

        influstart: (np.ndarray): saved as dataset "influstart_<ndm>"

        i1: (np.ndarray): saved as dataset "i1_<ndm>"

        j1: (np.ndarray): saved as dataset "j1_<ndm>"

        ok: (np.ndarray): saved as dataset "ok_<ndm>"
    """
    commit = check_output(["git", "rev-parse", "--short", "HEAD"]).decode('utf8').strip()
    print("writing files and updating database")
    # the file name index is the last row of the "dm" table of the database
    df = pandas.read_hdf(
            os.getenv('SHESHA_ROOT') + "/data/dataBase/matricesDataBase.h5", "dm")
    ind = len(df.index) - 1
    savename = os.getenv('SHESHA_ROOT') + "/data/dataBase/mat/dm_" + \
            commit + "_" + str(ind) + ".h5"
    save_hdf5(savename, "influpos_" + str(ndm), influpos)
    save_hdf5(savename, "ninflu_" + str(ndm), ninflu)
    save_hdf5(savename, "influstart_" + str(ndm), influstart)
    save_hdf5(savename, "i1_" + str(ndm), i1)
    save_hdf5(savename, "j1_" + str(ndm), j1)
    save_hdf5(savename, "ok_" + str(ndm), ok)
781 
782 
def load_imat_from_dataBase(database):
    """ Read and return the imat from the database file.

    Args:

        database: (dict): dictionary containing paths to matrices to load

    :return: (np.ndarray) : the interaction matrix
    """
    print("loading", database["imat"])
    with h5py.File(database["imat"], 'r') as f:
        imat = f["imat"][:]
    return imat
796 
797 
def save_imat_in_dataBase(imat):
    """ Save the imat in the database

    Args:

        imat: (np.ndarray): imat to save
    """
    commit = check_output(["git", "rev-parse", "--short", "HEAD"]).decode('utf8').strip()
    print("writing files and updating database")
    # the file name index is the last row of the "imat" table of the database
    df = pandas.read_hdf(
            os.getenv('SHESHA_ROOT') + "/data/dataBase/matricesDataBase.h5", "imat")
    ind = len(df.index) - 1
    savename = os.getenv('SHESHA_ROOT') + "/data/dataBase/mat/imat_" + \
            commit + "_" + str(ind) + ".h5"
    save_hdf5(savename, "imat", imat)
P-Class (parametres) Param_centroider.
Definition: PCENTROIDER.py:47
P-Class (parametres) Param_controller.
Definition: PCONTROLLER.py:47
P-Class (parametres) Param_dm.
Definition: PDMS.py:46
P-Class (parametres) Param_wfs.
Definition: PWFS.py:47
def save_imat_in_dataBase(imat)
Save the DM geometry in the database.
Definition: hdf5_util.py:806
def params_dictionary(config)
Create and returns a dictionary of all the config parameters with the corresponding keys for further ...
Definition: hdf5_util.py:78
def initDataBase(savepath, param_dict)
Initialize and create the database for all the saved matrices.
Definition: hdf5_util.py:185
def validInStore(store, savepath, matricetype)
TODO docstring.
Definition: hdf5_util.py:504
def readHdf5SingleDataset(filename, datasetName="dataset")
Read a single dataset from an hdf5 file.
Definition: hdf5_util.py:675
def save_h5(filename, dataname, config, data)
save_hdf5(filename, dataname, config, data) Create a hdf5 file and store data in it with full header ...
Definition: hdf5_util.py:260
def checkTurbuParams(savepath, config, pdict, matricesToLoad)
Compare the current turbulence parameters to the database.
Definition: hdf5_util.py:312
def writeHdf5SingleDataset(filename, data, datasetName="dataset")
Write a hdf5 file containig a single field.
Definition: hdf5_util.py:660
def load_imat_from_dataBase(database)
Read and return the imat.
Definition: hdf5_util.py:791
def save_dm_geom_in_dataBase(ndm, influpos, ninflu, influstart, i1, j1, ok)
Save the DM geometry in the database.
Definition: hdf5_util.py:769
def save_hdf5(filename, dataname, data)
save_hdf5(filename, dataname, data) Create a dataset in an existing hdf5 file filename and store data...
Definition: hdf5_util.py:240
def updateParamDict(pdict, pClass, prefix)
Update parameters dictionnary pdict with all the parameters of pClass.
Definition: hdf5_util.py:49
def updateDataBase(h5file, savepath, matrix_type)
Update the database adding a new row to the matrix_type database.
Definition: hdf5_util.py:212
def load_AB_from_dataBase(database, ind)
Read and return A, B, istx and isty from the database.
Definition: hdf5_util.py:691
def configFromH5(filename, config)
TODO docstring.
Definition: hdf5_util.py:514
def load_dm_geom_from_dataBase(database, ndm)
Read and return the DM geometry.
Definition: hdf5_util.py:739
def checkDmsParams(savepath, config, pdict, matricesToLoad)
Compare the current controller parameters to the database.
Definition: hdf5_util.py:433
def checkControlParams(savepath, config, pdict, matricesToLoad)
Compare the current controller parameters to the database.
Definition: hdf5_util.py:369
def save_AB_in_database(k, A, B, istx, isty)
Save A, B, istx and isty in the database.
Definition: hdf5_util.py:717
def init_hdf5_files(savepath, param_dict, matricesToLoad)
TODO docstring.
Definition: hdf5_util.py:151
def create_file_attributes(filename, param_dict)
create_file_attributes(filename,config) Create an hdf5 file wtih attributes corresponding to all simu...
Definition: hdf5_util.py:122
def validFile(filename)
TODO docstring.
Definition: hdf5_util.py:496
def validDataBase(savepath, matricesToLoad)
TODO docstring.
Definition: hdf5_util.py:483
def checkMatricesDataBase(savepath, config, param_dict)
Check in the database if the current config have been already run.
Definition: hdf5_util.py:285