42 from subprocess
import check_output
47 Update parameters dictionary pdict with all the parameters of pClass.
48 Prefix must be set to define the key value of the new dict entries
50 if (isinstance(pClass, list)):
51 params = pClass[0].__dict__.keys()
55 p.__dict__[k].encode(
"utf8")
if isinstance(
56 p.__dict__[k], str)
else
57 p.__dict__[k]
for p
in pClass
62 params = pClass.__dict__.keys()
65 if isinstance(pClass.__dict__[k], str):
66 pdict.update({k: pClass.__dict__[k].encode(
"utf8")})
68 pdict.update({k: pClass.__dict__[k]})
72 """ Create and returns a dictionary of all the config parameters with the
73 corresponding keys for further creation of database and save files
75 :param config: (module) : simulation parameters
77 :return param_dict: (dictionary) : dictionary of parameters
80 commit = check_output([
"git",
"rev-parse",
"--short",
"HEAD"]).strip()
82 param_dict = {
"simul_name": config.simul_name.encode(
'utf8'),
"commit": commit}
87 if config.p_atmos
is not None:
89 if config.p_targets
is not None:
91 param_dict.update({
"ntargets": len(config.p_targets)})
92 if config.p_wfss
is not None:
94 param_dict.update({
"nwfs": len(config.p_wfss)})
95 if config.p_dms
is not None:
97 param_dict.update({
"ndms": len(config.p_dms)})
98 if config.p_controllers
is not None:
99 updateParamDict(param_dict, config.p_controllers,
"_Param_controller__")
100 param_dict.update({
"ncontrollers": len(config.p_controllers)})
101 if config.p_centroiders
is not None:
102 updateParamDict(param_dict, config.p_centroiders,
"_Param_centroider__")
103 param_dict.update({
"ncentroiders": len(config.p_centroiders)})
105 for k
in param_dict.keys():
106 if type(param_dict[k])
is list:
107 param_dict[k] = [d
if d
is not None else -10
for d
in param_dict[k]]
108 elif param_dict[k]
is None:
114 """ create_file_attributes(filename,config)
115 Create an hdf5 file with attributes corresponding to all simulation parameters
119 filename : (str) : full path + filename to create
121 config : () : simulation parameters
123 f = h5py.File(filename,
"w")
125 for i
in list(param_dict.keys()):
126 if (isinstance(param_dict[i], str)):
127 attr = param_dict[i].encode(
"utf-8")
128 elif (isinstance(param_dict[i], list)):
130 s.encode(
"utf-8")
if isinstance(s, str)
else s
for s
in param_dict[i]
134 if(isinstance(attr, np.ndarray)):
136 elif(isinstance(attr, list)):
137 if(isinstance(attr[0], np.ndarray)):
138 for k,data
in enumerate(attr):
141 f.attrs.create(i, attr)
143 f.attrs.create(i, attr)
144 f.attrs.create(
"validity",
False)
145 print(filename,
"initialized")
152 commit = check_output([
"git",
"rev-parse",
"--short",
"HEAD"]).decode(
'utf8').strip()
154 if "A" not in matricesToLoad:
155 df = pandas.read_hdf(savepath +
"matricesDataBase.h5",
"A")
157 filename = savepath +
"turbu/A_" + commit +
"_" + str(ind) +
".h5"
161 if not (
"dm" in matricesToLoad):
162 df = pandas.read_hdf(savepath +
"matricesDataBase.h5",
"dm")
164 filename = savepath +
"mat/dm_" + commit +
"_" + str(ind) +
".h5"
168 if not (
"imat" in matricesToLoad):
169 df = pandas.read_hdf(savepath +
"matricesDataBase.h5",
"imat")
171 filename = savepath +
"mat/imat_" + commit +
"_" + str(ind) +
".h5"
177 """ Initialize and create the database for all the saved matrices. This database
178 will be placed on the top of the savepath and be named matricesDataBase.h5.
182 savepath : (str) : path to the data directory
184 param_dict : (dictionary) : parameters dictionary
186 keys = list(param_dict.keys())
187 keys.append(
"path2file")
188 keys.append(
"validity")
189 df = pandas.DataFrame(columns=keys)
190 store = pandas.HDFStore(savepath +
"matricesDataBase.h5")
192 store.put(
"imat", df)
195 print(
"Matrices database created")
199 """ Update the database adding a new row to the matrix_type database.
203 h5file : (str) : path to the new h5 file to add
205 savepath : (str) : path to the data directory
207 matrix_type : (str) : type of matrix to store ("A","B","istx","isty"
208 "istx","eigenv","imat","U"
211 if (matrix_type ==
"A" or matrix_type ==
"imat" or matrix_type ==
"dm"):
212 f = h5py.File(h5file,
"r")
213 store = pandas.HDFStore(savepath +
"matricesDataBase.h5")
214 df = store[matrix_type]
216 for i
in list(f.attrs.keys()):
217 df.loc[ind, i] = f.attrs[i]
218 df.loc[ind,
"path2file"] = h5file
219 df.loc[ind,
"validity"] =
False
220 store.put(matrix_type, df)
224 raise ValueError(
"Wrong matrix_type specified. See documentation")
228 """ save_hdf5(filename, dataname, data)
229 Create a dataset in an existing hdf5 file filename and store data in it
233 filename: (str) : full path to the file
235 dataname : (str) : name of the data (imat, cmat...)
237 data : np.array : data to save
239 f = h5py.File(filename,
"r+")
240 f.create_dataset(dataname, data=data)
244 def save_h5(filename, dataname, config, data):
245 """ save_hdf5(filename, dataname, config, data)
246 Create a hdf5 file and store data in it with full header from config parameters
247 Useful to backtrace data origins
251 filename: (str) : full path to the file
253 dataname : (str) : name of the data (imat, cmat...)
255 config : (module) : config parameters
257 data : np.array : data to save
262 print(filename,
"has been written")
266 """ Check in the database if the current config has already been run. If so,
267 return a dictionary containing the matrices to load and their path. Matrices
268 which don't appear in the dictionary will be computed, stored and added
269 to the database during the simulation.
270 If the database doesn't exist, this function creates it.
274 savepath : (str) : path to the data directory
276 config : (module) : simulation parameters
278 param_dict : (dictionary) : parameters dictionary
282 matricesToLoad : (dictionary) : matrices that will be loaded and their path
286 if (os.path.exists(savepath +
"matricesDataBase.h5")):
290 if "dm" in matricesToLoad:
296 return matricesToLoad
300 """ Compare the current turbulence parameters to the database. If similar parameters
301 are found, the matricesToLoad dictionary is completed.
302 Since all the turbulence matrices are computed together, we only check the parameters
303 for the A matrix : if we load A, we load B, istx and isty too.
307 config : (module) : simulation parameters
309 matricesToLoad : (dictionary) : matrices that will be loaded and their path
311 dataBase = pandas.read_hdf(savepath +
"matricesDataBase.h5",
"A")
313 "_Param_atmos__r0",
"_Param_atmos__seeds",
"_Param_atmos__L0",
314 "_Param_atmos__alt",
"_Param_tel__diam",
"_Param_tel__cobs",
315 "_Param_geom__pupdiam",
"_Param_geom__zenithangle",
"_Param_target__xpos",
316 "_Param_target__ypos",
"_Param_wfs__xpos",
"_Param_wfs__ypos"
319 for i
in dataBase.index:
321 commit = check_output([
"git",
"rev-parse",
"--short",
"HEAD"]).strip()
322 if (dataBase.loc[i,
"validity"]
and (dataBase.loc[i,
"commit"] == commit)):
325 if (cc >= len(param2test)):
328 cond = dataBase.loc[i, param2test[cc]] == pdict[param2test[cc]]
329 if type(cond)
is np.ndarray:
336 print(param2test[cc] +
" has changed from ",
337 dataBase.loc[i, param2test[cc]],
" to ", pdict[param2test[cc]])
343 matricesToLoad[
"index_turbu"] = i
344 matricesToLoad[
"A"] = dataBase.loc[i,
"path2file"]
357 """ Compare the current controller parameters to the database. If similar parameters
358 are found, matricesToLoad dictionary is completed.
359 Since all the controller matrices are computed together, we only check the parameters
360 for the imat matrix : if we load imat, we load eigenv and U too.
364 config : (module) : simulation parameters
368 matricesToLoad : (dictionary) : matrices that will be loaded and their path
368 dataBase = pandas.read_hdf(savepath +
"matricesDataBase.h5",
"imat")
371 "_Param_tel__diam",
"_Param_tel__t_spiders",
"_Param_tel__spiders_type",
372 "_Param_tel__pupangle",
"_Param_tel__referr",
"_Param_tel__std_piston",
373 "_Param_tel__std_tt",
"_Param_tel__type_ap",
"_Param_tel__nbrmissing",
374 "_Param_tel__cobs",
"_Param_geom__pupdiam",
"nwfs",
"_Param_wfs__type",
375 "_Param_wfs__nxsub",
"_Param_wfs__npix",
"_Param_wfs__pixsize",
376 "_Param_wfs__fracsub",
"_Param_wfs__xpos",
"_Param_wfs__ypos",
377 "_Param_wfs__Lambda",
"_Param_wfs__dms_seen",
"_Param_wfs__fssize",
378 "_Param_wfs__fstop",
"_Param_wfs__pyr_ampl",
"_Param_wfs__pyr_loc",
379 "_Param_wfs__pyr_npts",
"_Param_wfs__pyr_pup_sep",
"_Param_wfs__pyrtype",
380 "ndms",
"_Param_dm__type",
"_Param_dm__alt",
"_Param_dm__coupling",
381 "_Param_dm__margin_in",
"_Param_dm__margin_out",
"_Param_dm__nact",
382 "_Param_dm__nkl",
"_Param_dm__type_kl",
"_Param_dm__push4imat",
383 "_Param_dm__thresh",
"_Param_dm__unitpervolt",
"ncentroiders",
384 "_Param_centroider__type",
"_Param_centroider__nmax",
385 "_Param_centroider__nwfs",
"_Param_centroider__sizex",
386 "_Param_centroider__sizey",
"_Param_centroider__thresh",
387 "_Param_centroider__type_fct",
"_Param_centroider__weights",
388 "_Param_centroider__width"
391 for i
in dataBase.index:
393 commit = check_output([
"git",
"rev-parse",
"--short",
"HEAD"]).strip()
394 if (dataBase.loc[i,
"validity"]
and (dataBase.loc[i,
"commit"] == commit)):
397 if (cc >= len(param2test)):
400 cond = dataBase.loc[i, param2test[cc]] == pdict[param2test[cc]]
401 if type(cond)
is np.ndarray:
408 print(param2test[cc] +
" has changed from ",
409 dataBase.loc[i, param2test[cc]],
" to ", pdict[param2test[cc]])
415 matricesToLoad[
"index_control"] = i
416 matricesToLoad[
"imat"] = dataBase.loc[i,
"path2file"]
421 """ Compare the current DM parameters to the database. If similar parameters
422 are found, matricesToLoad dictionary is completed.
423 Since all the dms matrices are computed together, we only check the parameters
424 for the pztok matrix : if we load pztok, we load pztnok too.
428 config : (module) : simulation parameters
430 matricesToLoad : (dictionary) : matrices that will be loaded and their path
432 dataBase = pandas.read_hdf(savepath +
"matricesDataBase.h5",
"dm")
435 "_Param_tel__diam",
"_Param_tel__t_spiders",
"_Param_tel__spiders_type",
436 "_Param_tel__pupangle",
"_Param_tel__referr",
"_Param_tel__std_piston",
437 "_Param_tel__std_tt",
"_Param_tel__type_ap",
"_Param_tel__nbrmissing",
438 "_Param_tel__cobs",
"_Param_geom__pupdiam",
"nwfs",
"_Param_wfs__type",
439 "_Param_wfs__nxsub",
"_Param_wfs__npix",
"_Param_wfs__pixsize",
440 "_Param_wfs__fracsub",
"_Param_wfs__xpos",
"_Param_wfs__ypos",
441 "_Param_wfs__Lambda",
"_Param_wfs__dms_seen",
"_Param_wfs__fssize",
442 "_Param_wfs__fstop",
"_Param_wfs__pyr_ampl",
"_Param_wfs__pyr_loc",
443 "_Param_wfs__pyr_npts",
"_Param_wfs__pyrtype",
"_Param_wfs__pyr_pup_sep",
444 "ndms",
"_Param_dm__type",
"_Param_dm__alt",
"_Param_dm__coupling",
445 "_Param_dm__margin_in",
"_Param_dm__margin_out",
"_Param_dm__nkl",
446 "_Param_dm__nact",
"_Param_dm__type_kl",
"_Param_dm__push4imat",
447 "_Param_dm__thresh",
"_Param_dm__unitpervolt"
450 for i
in dataBase.index:
452 commit = check_output([
"git",
"rev-parse",
"--short",
"HEAD"]).strip()
453 if (dataBase.loc[i,
"validity"]
and (dataBase.loc[i,
"commit"] == commit)):
456 if (cc >= len(param2test)):
459 cond = dataBase.loc[i, param2test[cc]] == pdict[param2test[cc]]
460 if type(cond)
is np.ndarray:
467 print((param2test[cc] +
" has changed from ",
468 dataBase.loc[i, param2test[cc]],
" to ", pdict[param2test[cc]]))
474 matricesToLoad[
"index_dms"] = i
475 matricesToLoad[
"dm"] = dataBase.loc[i,
"path2file"]
482 store = pandas.HDFStore(savepath +
"matricesDataBase.h5")
483 if not (
"A" in matricesToLoad):
485 if not (
"dm" in matricesToLoad):
487 if not (
"imat" in matricesToLoad):
495 f = h5py.File(filename,
"r+")
496 f.attrs[
"validity"] =
True
503 df = store[matricetype]
504 ind = len(df.index) - 1
505 df.loc[ind,
"validity"] =
True
506 store[matricetype] = df
515 f = h5py.File(filename,
"r")
517 config.simul_name = str(f.attrs.get(
"simulname"))
519 config.p_loop.set_niter(f.attrs.get(
"niter"))
520 config.p_loop.set_ittime(f.attrs.get(
"ittime"))
523 config.p_geom.set_zenithangle(f.attrs.get(
"zenithangle"))
524 config.p_geom.set_pupdiam(f.attrs.get(
"pupdiam"))
527 config.p_tel.set_diam(f.attrs.get(
"tel_diam"))
528 config.p_tel.set_cobs(f.attrs.get(
"cobs"))
529 config.p_tel.set_nbrmissing(f.attrs.get(
"nbrmissing"))
530 config.p_tel.set_t_spiders(f.attrs.get(
"t_spiders"))
531 config.p_tel.set_type_ap(str(f.attrs.get(
"type_ap")))
532 config.p_tel.set_spiders_type(str(f.attrs.get(
"spiders_type")))
533 config.p_tel.set_pupangle(f.attrs.get(
"pupangle"))
534 config.p_tel.set_referr(f.attrs.get(
"referr"))
535 config.p_tel.set_std_piston(f.attrs.get(
"std_piston"))
536 config.p_tel.set_std_tt(f.attrs.get(
"std_tt"))
539 config.p_atmos.set_r0(f.attrs.get(
"r0"))
540 config.p_atmos.set_nscreens(f.attrs.get(
"nscreens"))
541 config.p_atmos.set_frac(f.attrs.get(
"frac"))
542 config.p_atmos.set_alt(f.attrs.get(
"atm.alt"))
543 config.p_atmos.set_windspeed(f.attrs.get(
"windspeed"))
544 config.p_atmos.set_winddir(f.attrs.get(
"winddir"))
545 config.p_atmos.set_L0(f.attrs.get(
"L0"))
546 config.p_atmos.set_seeds(f.attrs.get(
"seeds"))
549 config.p_target.set_nTargets(f.attrs.get(
"ntargets"))
550 config.p_target.set_xpos(f.attrs.get(
"target.xpos"))
551 config.p_target.set_ypos(f.attrs.get(
"target.ypos"))
552 config.p_target.set_Lambda(f.attrs.get(
"target.Lambda"))
553 config.p_target.set_mag(f.attrs.get(
"target.mag"))
554 if (f.attrs.get(
"target.dms_seen") > -1):
555 config.p_target.set_dms_seen(f.attrs.get(
"target.dms_seen"))
559 for i
in range(f.attrs.get(
"nwfs")):
561 config.p_wfss[i].set_type(str(f.attrs.get(
"type")[i]))
562 config.p_wfss[i].set_nxsub(f.attrs.get(
"nxsub")[i])
563 config.p_wfss[i].set_npix(f.attrs.get(
"npix")[i])
564 config.p_wfss[i].set_pixsize(f.attrs.get(
"pixsize")[i])
565 config.p_wfss[i].set_fracsub(f.attrs.get(
"fracsub")[i])
566 config.p_wfss[i].set_xpos(f.attrs.get(
"wfs.xpos")[i])
567 config.p_wfss[i].set_ypos(f.attrs.get(
"wfs.ypos")[i])
568 config.p_wfss[i].set_Lambda(f.attrs.get(
"wfs.Lambda")[i])
569 config.p_wfss[i].set_gsmag(f.attrs.get(
"gsmag")[i])
570 config.p_wfss[i].set_optthroughput(f.attrs.get(
"optthroughput")[i])
571 config.p_wfss[i].set_zerop(f.attrs.get(
"zerop")[i])
572 config.p_wfss[i].set_noise(f.attrs.get(
"noise")[i])
573 config.p_wfss[i].set_atmos_seen(f.attrs.get(
"atmos_seen")[i])
574 config.p_wfss[i].set_fstop(str(f.attrs.get(
"fstop")[i]))
575 config.p_wfss[i].set_pyr_npts(f.attrs.get(
"pyr_npts")[i])
576 config.p_wfss[i].set_pyr_ampl(f.attrs.get(
"pyr_ampl")[i])
577 config.p_wfss[i].set_pyrtype(str(f.attrs.get(
"pyrtype")[i]))
578 config.p_wfss[i].set_pyr_loc(str(f.attrs.get(
"pyr_loc")[i]))
579 config.p_wfss[i].set_fssize(f.attrs.get(
"fssize")[i])
580 if ((f.attrs.get(
"dms_seen")[i] > -1).all()):
581 config.p_wfss[i].set_dms_seen(f.attrs.get(
"dms_seen")[i])
584 config.p_wfss[i].set_gsalt(f.attrs.get(
"gsalt")[i])
585 config.p_wfss[i].set_lltx(f.attrs.get(
"lltx")[i])
586 config.p_wfss[i].set_llty(f.attrs.get(
"llty")[i])
587 config.p_wfss[i].set_laserpower(f.attrs.get(
"laserpower")[i])
588 config.p_wfss[i].set_lgsreturnperwatt(f.attrs.get(
"lgsreturnperwatt")[i])
589 config.p_wfss[i].set_proftype(str(f.attrs.get(
"proftype")[i]))
590 config.p_wfss[i].set_beamsize(f.attrs.get(
"beamsize")[i])
594 if (f.attrs.get(
"ndms")):
595 for i
in range(f.attrs.get(
"ndms")):
597 config.p_dms[i].set_type(str(f.attrs.get(
"type")[i]))
598 config.p_dms[i].set_nact(f.attrs.get(
"nact")[i])
599 config.p_dms[i].set_alt(f.attrs.get(
"dm.alt")[i])
600 config.p_dms[i].set_thresh(f.attrs.get(
"dm.thresh")[i])
601 config.p_dms[i].set_coupling(f.attrs.get(
"coupling")[i])
602 config.p_dms[i].set_unitpervolt(f.attrs.get(
"unitpervolt")[i])
603 config.p_dms[i].set_push4imat(f.attrs.get(
"push4imat")[i])
606 config.p_centroiders = []
607 if (f.attrs.get(
"ncentroiders")):
608 for i
in range(f.attrs.get(
"ncentroiders")):
610 config.p_centroiders[i].set_nwfs(f.attrs.get(
"centro.nwfs")[i])
611 config.p_centroiders[i].set_type(str(f.attrs.get(
"type")[i]))
612 config.p_centroiders[i].set_type_fct(str(f.attrs.get(
"type_fct")[i]))
613 config.p_centroiders[i].set_nmax(f.attrs.get(
"nmax")[i])
614 config.p_centroiders[i].set_thresh(f.attrs.get(
"centroider.thresh")[i])
615 if (f.attrs.get(
"weights")[i]):
616 config.p_centroiders[i].set_weights(f.attrs.get(
"weights")[i])
617 config.p_centroiders[i].set_width(f.attrs.get(
"width")[i])
618 config.p_rtc.set_centroiders(config.p_centroiders)
621 config.p_controllers = []
622 if (f.attrs.get(
"ncontrollers")):
623 for i
in range(f.attrs.get(
"ncontrollers")):
625 config.p_controllers[i].set_type(str(f.attrs.get(
"type")[i]))
626 config.p_controllers[i].set_nwfs(f.attrs.get(
"control.nwfs")[i])
627 config.p_controllers[i].set_ndm(f.attrs.get(
"ndm")[i])
628 config.p_controllers[i].set_maxcond(f.attrs.get(
"maxcond")[i])
629 config.p_controllers[i].set_delay(f.attrs.get(
"delay")[i])
630 config.p_controllers[i].set_gain(f.attrs.get(
"gain")[i])
631 config.p_controllers[i].set_modopti(f.attrs.get(
"modopti")[i])
632 config.p_controllers[i].set_nrec(f.attrs.get(
"nrec")[i])
633 config.p_controllers[i].set_nmodes(f.attrs.get(
"nmodes")[i])
634 config.p_controllers[i].set_gmin(f.attrs.get(
"gmin")[i])
635 config.p_controllers[i].set_gmax(f.attrs.get(
"gmax")[i])
636 config.p_controllers[i].set_ngain(f.attrs.get(
"ngain")[i])
637 config.p_controllers[i].set_TTcond(f.attrs.get(
"TTcond")[i])
638 config.p_controllers[i].set_cured_ndivs(f.attrs.get(
"cured_ndivs")[i])
639 config.p_rtc.set_controllers(config.p_controllers)
641 config.p_rtc.set_nwfs(f.attrs.get(
"nwfs"))
643 print(
"Parameters have been read from ", filename,
"header")
647 """ Write a hdf5 file containing a single field
649 If the file already exists, it will be overwritten
653 filename: (str) : name of the file to write
655 data: (np.ndarray) : content of the file
657 datasetName: (str) : name of the dataset to write (default="dataset")
660 f = h5py.File(filename,
"w")
661 f.create_dataset(datasetName, data=data)
666 """ Read a single dataset from an hdf5 file
670 filename: (str) : name of the file to read from
672 datasetName: (str) : name of the dataset to read (default="dataset")
675 f = h5py.File(filename,
"r")
676 data = f[datasetName][:]
682 """ Read and return A, B, istx and isty from the database
686 database: (dict): dictionary containing paths to matrices to load
688 ind: (int): layer index
690 print(
"loading", database[
"A"])
691 f = h5py.File(database[
"A"],
'r')
692 A = f[
"A_" + str(ind)][:]
693 B = f[
"B_" + str(ind)][:]
694 istx = f[
"istx_" + str(ind)][:]
695 isty = f[
"isty_" + str(ind)][:]
698 return A, B, istx, isty
702 """ Save A, B, istx and isty in the database
716 commit = check_output([
"git",
"rev-parse",
"--short",
"HEAD"]).decode(
'utf8').strip()
717 print(
"writing files and updating database")
718 df = pandas.read_hdf(
719 os.getenv(
'SHESHA_ROOT') +
"/data/dataBase/matricesDataBase.h5",
"A")
720 ind = len(df.index) - 1
721 savename = os.getenv(
'SHESHA_ROOT') +
"/data/dataBase/turbu/A_" + \
722 commit +
"_" + str(ind) +
".h5"
725 save_hdf5(savename,
"istx_" + str(k), istx)
726 save_hdf5(savename,
"isty_" + str(k), isty)
730 """ Read and return the DM geometry
734 database: (dict): dictionary containing paths to matrices to load
738 print(
"loading", database[
"dm"])
739 f = h5py.File(database[
"dm"],
'r')
740 influpos = f[
"influpos_" + str(ndm)][:]
741 ninflu = f[
"ninflu_" + str(ndm)][:]
742 influstart = f[
"influstart_" + str(ndm)][:]
743 i1 = f[
"i1_" + str(ndm)][:]
744 j1 = f[
"j1_" + str(ndm)][:]
745 ok = f[
"ok_" + str(ndm)][:]
748 return influpos, ninflu, influstart, i1, j1, ok
752 """ Save the DM geometry in the database
768 commit = check_output([
"git",
"rev-parse",
"--short",
"HEAD"]).decode(
'utf8').strip()
769 print(
"writing files and updating database")
770 df = pandas.read_hdf(
771 os.getenv(
'SHESHA_ROOT') +
"/data/dataBase/matricesDataBase.h5",
"dm")
772 ind = len(df.index) - 1
773 savename = os.getenv(
'SHESHA_ROOT') +
"/data/dataBase/mat/dm_" + \
774 commit +
"_" + str(ind) +
".h5"
775 save_hdf5(savename,
"influpos_" + str(ndm), influpos)
776 save_hdf5(savename,
"ninflu_" + str(ndm), ninflu)
777 save_hdf5(savename,
"influstart_" + str(ndm), influstart)
778 save_hdf5(savename,
"i1_" + str(ndm), i1)
779 save_hdf5(savename,
"j1_" + str(ndm), j1)
780 save_hdf5(savename,
"ok_" + str(ndm), ok)
784 """ Read and return the imat
788 database: (dict): dictionary containing paths to matrices to load
790 print(
"loading", database[
"imat"])
791 f = h5py.File(database[
"imat"],
'r')
799 """ Save the imat in the database
803 imat: (np.ndarray): imat to save
805 commit = check_output([
"git",
"rev-parse",
"--short",
"HEAD"]).decode(
'utf8').strip()
806 print(
"writing files and updating database")
807 df = pandas.read_hdf(
808 os.getenv(
'SHESHA_ROOT') +
"/data/dataBase/matricesDataBase.h5",
"imat")
809 ind = len(df.index) - 1
810 savename = os.getenv(
'SHESHA_ROOT') +
"/data/dataBase/mat/imat_" + \
811 commit +
"_" + str(ind) +
".h5"
P-Class (parametres) Param_centroider.
P-Class (parametres) Param_controller.
P-Class (parametres) Param_dm.
P-Class (parametres) Param_wfs.
def save_imat_in_dataBase(imat)
Save the imat in the database.
def params_dictionary(config)
Create and returns a dictionary of all the config parameters with the corresponding keys for further ...
def initDataBase(savepath, param_dict)
Initialize and create the database for all the saved matrices.
def validInStore(store, savepath, matricetype)
TODO docstring.
def readHdf5SingleDataset(filename, datasetName="dataset")
Read a single dataset from an hdf5 file.
def save_h5(filename, dataname, config, data)
save_hdf5(filename, dataname, config, data) Create a hdf5 file and store data in it with full header ...
def checkTurbuParams(savepath, config, pdict, matricesToLoad)
Compare the current turbulence parameters to the database.
def writeHdf5SingleDataset(filename, data, datasetName="dataset")
Write a hdf5 file containing a single field.
def load_imat_from_dataBase(database)
Read and return the imat.
def save_dm_geom_in_dataBase(ndm, influpos, ninflu, influstart, i1, j1, ok)
Save the DM geometry in the database.
def save_hdf5(filename, dataname, data)
save_hdf5(filename, dataname, data) Create a dataset in an existing hdf5 file filename and store data...
def updateParamDict(pdict, pClass, prefix)
Update parameters dictionary pdict with all the parameters of pClass.
def updateDataBase(h5file, savepath, matrix_type)
Update the database adding a new row to the matrix_type database.
def load_AB_from_dataBase(database, ind)
Read and return A, B, istx and isty from the database.
def configFromH5(filename, config)
TODO docstring.
def load_dm_geom_from_dataBase(database, ndm)
Read and return the DM geometry.
def checkDmsParams(savepath, config, pdict, matricesToLoad)
Compare the current DM parameters to the database.
def checkControlParams(savepath, config, pdict, matricesToLoad)
Compare the current controller parameters to the database.
def save_AB_in_database(k, A, B, istx, isty)
Save A, B, istx and isty in the database.
def init_hdf5_files(savepath, param_dict, matricesToLoad)
TODO docstring.
def create_file_attributes(filename, param_dict)
create_file_attributes(filename,config) Create an hdf5 file wtih attributes corresponding to all simu...
def validFile(filename)
TODO docstring.
def validDataBase(savepath, matricesToLoad)
TODO docstring.
def checkMatricesDataBase(savepath, config, param_dict)
Check in the database if the current config has already been run.