import os

import h5py
import numpy as np
import pandas

from subprocess import check_output

# NOTE: `ao` below is assumed to expose the shesha Param_* classes
# (Param_wfs, Param_dm, Param_centroider, Param_controller) used in
# configFromH5; adjust the import to your shesha install if needed.
import shesha.config as ao


def updateParamDict(pdict, pClass, prefix):
    """ Update the parameters dictionary pdict with all the parameters of pClass.
    Prefix must be set to define the key value of the new dict entries
    """
    if isinstance(pClass, list):
        params = [
            i for i in dir(pClass[0])
            if not i.startswith('_') and not i.startswith('set_')
            and not i.startswith('get_')
        ]
        for k in params:
            pdict.update({
                prefix + k: [
                    p.__dict__[prefix + k].encode("utf8")
                    if isinstance(p.__dict__[prefix + k], str)
                    else p.__dict__[prefix + k]
                    for p in pClass
                ]
            })
    else:
        params = [
            i for i in dir(pClass)
            if not i.startswith('_') and not i.startswith('set_')
            and not i.startswith('get_')
        ]
        for k in params:
            if isinstance(pClass.__dict__[prefix + k], str):
                pdict.update(
                    {prefix + k: pClass.__dict__[prefix + k].encode("utf8")})
            else:
                pdict.update({prefix + k: pClass.__dict__[prefix + k]})
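

# --- Hedged usage sketch ----------------------------------------------------
# _DemoParam below is purely illustrative (the real Param_* classes come from
# shesha); it only mimics their layout: a public property plus a name-mangled
# private attribute that updateParamDict reads through __dict__.
def _demo_updateParamDict():
    class _DemoParam:
        def __init__(self):
            self.__r0 = 0.16  # stored in __dict__ as "_DemoParam__r0"

        @property
        def r0(self):
            return self.__r0

    pdict = {}
    updateParamDict(pdict, _DemoParam(), "_DemoParam__")
    # pdict is now {"_DemoParam__r0": 0.16}
    return pdict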


def params_dictionary(config):
    """ Create and return a dictionary of all the config parameters with the
    corresponding keys for further creation of database and save files

    :param config: (module) : simulation parameters

    :return param_dict: (dictionary) : dictionary of parameters
    """
    commit = check_output(["git", "rev-parse", "--short", "HEAD"]).strip()

    param_dict = {"simul_name": config.simul_name.encode('utf8'), "commit": commit}
    if config.p_atmos is not None:
        updateParamDict(param_dict, config.p_atmos, "_Param_atmos__")
    if config.p_targets is not None:
        updateParamDict(param_dict, config.p_targets, "_Param_target__")
        param_dict.update({"ntargets": len(config.p_targets)})
    if config.p_wfss is not None:
        updateParamDict(param_dict, config.p_wfss, "_Param_wfs__")
        param_dict.update({"nwfs": len(config.p_wfss)})
    if config.p_dms is not None:
        updateParamDict(param_dict, config.p_dms, "_Param_dm__")
        param_dict.update({"ndms": len(config.p_dms)})
    if config.p_controllers is not None:
        updateParamDict(param_dict, config.p_controllers, "_Param_controller__")
        param_dict.update({"ncontrollers": len(config.p_controllers)})
    if config.p_centroiders is not None:
        updateParamDict(param_dict, config.p_centroiders, "_Param_centroider__")
        param_dict.update({"ncentroiders": len(config.p_centroiders)})
    for k in param_dict.keys():
        if type(param_dict[k]) is list:
            param_dict[k] = [d if d is not None else -10 for d in param_dict[k]]
        elif param_dict[k] is None:
            param_dict[k] = -10

    return param_dict
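

# Hedged usage sketch: `config` is assumed to be an imported shesha parameter
# module (the same object passed to the functions below); the call and the
# printed keys are illustrative only.
#
#     param_dict = params_dictionary(config)
#     print(param_dict["commit"], param_dict.get("nwfs"))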


def create_file_attributes(filename, param_dict):
    """ create_file_attributes(filename, param_dict)
    Create an HDF5 file with attributes corresponding to all simulation parameters

    filename : (str) : full path + filename to create

    param_dict : (dict) : dictionary of the simulation parameters
    """
    f = h5py.File(filename, "w")

    for i in list(param_dict.keys()):
        if isinstance(param_dict[i], str):
            attr = param_dict[i].encode("utf-8")
        elif isinstance(param_dict[i], list):
            attr = [
                s.encode("utf-8") if isinstance(s, str) else s
                for s in param_dict[i]
            ]
        else:
            attr = param_dict[i]
        f.attrs.create(i, attr)
    f.attrs.create("validity", False)
    print(filename, "initialized")
    f.close()


def init_hdf5_files(savepath, param_dict, matricesToLoad):
    """ Initialize the HDF5 files (one per matrix type) that will receive the
    matrices computed during this run, and register them in the database
    """
    commit = check_output(["git", "rev-parse", "--short", "HEAD"]).decode('utf8').strip()
    if "A" not in matricesToLoad:
        df = pandas.read_hdf(savepath + "matricesDataBase.h5", "A")
        ind = len(df.index)
        filename = savepath + "turbu/A_" + commit + "_" + str(ind) + ".h5"
        create_file_attributes(filename, param_dict)
        updateDataBase(filename, savepath, "A")
    if "dm" not in matricesToLoad:
        df = pandas.read_hdf(savepath + "matricesDataBase.h5", "dm")
        ind = len(df.index)
        filename = savepath + "mat/dm_" + commit + "_" + str(ind) + ".h5"
        create_file_attributes(filename, param_dict)
        updateDataBase(filename, savepath, "dm")
    if "imat" not in matricesToLoad:
        df = pandas.read_hdf(savepath + "matricesDataBase.h5", "imat")
        ind = len(df.index)
        filename = savepath + "mat/imat_" + commit + "_" + str(ind) + ".h5"
        create_file_attributes(filename, param_dict)
        updateDataBase(filename, savepath, "imat")


def initDataBase(savepath, param_dict):
    """ Initialize and create the database for all the saved matrices. This database
    will be placed on the top of the savepath and be named matricesDataBase.h5.

    savepath : (str) : path to the data directory

    param_dict : (dictionary) : parameters dictionary
    """
    keys = list(param_dict.keys())
    keys.append("path2file")
    keys.append("validity")
    df = pandas.DataFrame(columns=keys)
    store = pandas.HDFStore(savepath + "matricesDataBase.h5")
    store.put("A", df)
    store.put("imat", df)
    store.put("dm", df)
    store.close()
    print("Matrices database created")


def updateDataBase(h5file, savepath, matrix_type):
    """ Update the database adding a new row to the matrix_type database.

    h5file : (str) : path to the new h5 file to add

    savepath : (str) : path to the data directory

    matrix_type : (str) : type of matrix to store ("A", "B", "istx", "isty",
                                                   "eigenv", "imat", "U")
    """
    if matrix_type in ("A", "imat", "dm"):
        f = h5py.File(h5file, "r")
        store = pandas.HDFStore(savepath + "matricesDataBase.h5")
        df = store[matrix_type]
        ind = len(df.index)
        for i in list(f.attrs.keys()):
            df.loc[ind, i] = f.attrs[i]
        df.loc[ind, "path2file"] = h5file
        df.loc[ind, "validity"] = False
        store.put(matrix_type, df)
        store.close()
        f.close()
    else:
        raise ValueError("Wrong matrix_type specified. See documentation")


def save_hdf5(filename, dataname, data):
    """ save_hdf5(filename, dataname, data)
    Create a dataset in an existing hdf5 file filename and store data in it

    filename: (str) : full path to the file

    dataname : (str) : name of the data (imat, cmat...)

    data : np.array : data to save
    """
    f = h5py.File(filename, "r+")
    f.create_dataset(dataname, data=data)
    f.close()


def save_h5(filename, dataname, config, data):
    """ save_h5(filename, dataname, config, data)
    Create an hdf5 file and store data in it with a full header built from the
    config parameters. Useful to backtrace data origins

    filename: (str) : full path to the file

    dataname : (str) : name of the data (imat, cmat...)

    config : (module) : config parameters

    data : np.array : data to save
    """
    p_dict = params_dictionary(config)
    create_file_attributes(filename, p_dict)
    save_hdf5(filename, dataname, data)
    print(filename, "has been written")


def checkMatricesDataBase(savepath, config, param_dict):
    """ Check in the database whether the current config has already been run. If so,
    return a dictionary containing the matrices to load and their path. Matrices
    which don't appear in the dictionary will be computed, stored and added
    to the database during the simulation.
    If the database doesn't exist, this function creates it.

    savepath : (str) : path to the data directory

    config : (module) : simulation parameters

    param_dict : (dictionary) : parameters dictionary

    matricesToLoad : (dictionary) : matrices that will be loaded and their path
    """
    matricesToLoad = {}
    if os.path.exists(savepath + "matricesDataBase.h5"):
        checkTurbuParams(savepath, config, param_dict, matricesToLoad)
        checkDmsParams(savepath, config, param_dict, matricesToLoad)
        if "dm" in matricesToLoad:
            checkControlParams(savepath, config, param_dict, matricesToLoad)
    else:
        initDataBase(savepath, param_dict)
    init_hdf5_files(savepath, param_dict, matricesToLoad)

    return matricesToLoad
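

# --- Hedged workflow sketch --------------------------------------------------
# Typical call sequence around checkMatricesDataBase; `config` and `savepath`
# are assumed to come from the simulation setup.
#
#     param_dict = params_dictionary(config)
#     matricesToLoad = checkMatricesDataBase(savepath, config, param_dict)
#     if "A" in matricesToLoad:
#         A, B, istx, isty = load_AB_from_dataBase(matricesToLoad, 0)
#     # ...otherwise compute the matrices, then call
#     # validDataBase(savepath, matricesToLoad) once they have been stored.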


def checkTurbuParams(savepath, config, pdict, matricesToLoad):
    """ Compare the current turbulence parameters to the database. If similar parameters
    are found, the matricesToLoad dictionary is completed.
    Since all the turbulence matrices are computed together, we only check the parameters
    for the A matrix: if we load A, we load B, istx and isty too.

    config : (module) : simulation parameters

    matricesToLoad : (dictionary) : matrices that will be loaded and their path
    """
    dataBase = pandas.read_hdf(savepath + "matricesDataBase.h5", "A")
    param2test = [
        "_Param_atmos__r0", "_Param_atmos__seeds", "_Param_atmos__L0",
        "_Param_atmos__alt", "_Param_tel__diam", "_Param_tel__cobs",
        "_Param_geom__pupdiam", "_Param_geom__zenithangle", "_Param_target__xpos",
        "_Param_target__ypos", "_Param_wfs__xpos", "_Param_wfs__ypos"
    ]
    for i in dataBase.index:
        cc = 0
        commit = check_output(["git", "rev-parse", "--short", "HEAD"]).strip()
        if dataBase.loc[i, "validity"] and (dataBase.loc[i, "commit"] == commit):
            cond = True
            while cond:
                if cc >= len(param2test):
                    break
                cond = dataBase.loc[i, param2test[cc]] == pdict[param2test[cc]]
                if type(cond) is np.ndarray:
                    cond = cond.all()
                cc += 1
            if not cond:
                cc -= 1
                print(param2test[cc] + " has changed from ",
                      dataBase.loc[i, param2test[cc]], " to ", pdict[param2test[cc]])
        else:
            cond = False

        if cond:
            matricesToLoad["index_turbu"] = i
            matricesToLoad["A"] = dataBase.loc[i, "path2file"]
            return


def checkControlParams(savepath, config, pdict, matricesToLoad):
    """ Compare the current controller parameters to the database. If similar parameters
    are found, the matricesToLoad dictionary is completed.
    Since all the controller matrices are computed together, we only check the parameters
    for the imat matrix: if we load imat, we load eigenv and U too.

    config : (module) : simulation parameters

    matricesToLoad : (dictionary) : matrices that will be loaded and their path
    """
    dataBase = pandas.read_hdf(savepath + "matricesDataBase.h5", "imat")
    param2test = [
        "_Param_tel__diam", "_Param_tel__t_spiders", "_Param_tel__spiders_type",
        "_Param_tel__pupangle", "_Param_tel__referr", "_Param_tel__std_piston",
        "_Param_tel__std_tt", "_Param_tel__type_ap", "_Param_tel__nbrmissing",
        "_Param_tel__cobs", "_Param_geom__pupdiam", "nwfs", "_Param_wfs__type",
        "_Param_wfs__nxsub", "_Param_wfs__npix", "_Param_wfs__pixsize",
        "_Param_wfs__fracsub", "_Param_wfs__xpos", "_Param_wfs__ypos",
        "_Param_wfs__Lambda", "_Param_wfs__dms_seen", "_Param_wfs__fssize",
        "_Param_wfs__fstop", "_Param_wfs__pyr_ampl", "_Param_wfs__pyr_loc",
        "_Param_wfs__pyr_npts", "_Param_wfs__pyr_pup_sep", "_Param_wfs__pyrtype",
        "ndms", "_Param_dm__type", "_Param_dm__alt", "_Param_dm__coupling",
        "_Param_dm__margin_in", "_Param_dm__margin_out", "_Param_dm__nact",
        "_Param_dm__nkl", "_Param_dm__type_kl", "_Param_dm__push4imat",
        "_Param_dm__thresh", "_Param_dm__unitpervolt", "ncentroiders",
        "_Param_centroider__type", "_Param_centroider__nmax",
        "_Param_centroider__nwfs", "_Param_centroider__sizex",
        "_Param_centroider__sizey", "_Param_centroider__thresh",
        "_Param_centroider__type_fct", "_Param_centroider__weights",
        "_Param_centroider__width"
    ]
    for i in dataBase.index:
        cc = 0
        commit = check_output(["git", "rev-parse", "--short", "HEAD"]).strip()
        if dataBase.loc[i, "validity"] and (dataBase.loc[i, "commit"] == commit):
            cond = True
            while cond:
                if cc >= len(param2test):
                    break
                cond = dataBase.loc[i, param2test[cc]] == pdict[param2test[cc]]
                if type(cond) is np.ndarray:
                    cond = cond.all()
                cc += 1
            if not cond:
                cc -= 1
                print(param2test[cc] + " has changed from ",
                      dataBase.loc[i, param2test[cc]], " to ", pdict[param2test[cc]])
        else:
            cond = False

        if cond:
            matricesToLoad["index_control"] = i
            matricesToLoad["imat"] = dataBase.loc[i, "path2file"]
            return


def checkDmsParams(savepath, config, pdict, matricesToLoad):
    """ Compare the current DM parameters to the database. If similar parameters
    are found, the matricesToLoad dictionary is completed.
    Since all the DM matrices are computed together, we only check the parameters
    for the pztok matrix: if we load pztok, we load pztnok too.

    config : (module) : simulation parameters

    matricesToLoad : (dictionary) : matrices that will be loaded and their path
    """
    dataBase = pandas.read_hdf(savepath + "matricesDataBase.h5", "dm")
    param2test = [
        "_Param_tel__diam", "_Param_tel__t_spiders", "_Param_tel__spiders_type",
        "_Param_tel__pupangle", "_Param_tel__referr", "_Param_tel__std_piston",
        "_Param_tel__std_tt", "_Param_tel__type_ap", "_Param_tel__nbrmissing",
        "_Param_tel__cobs", "_Param_geom__pupdiam", "nwfs", "_Param_wfs__type",
        "_Param_wfs__nxsub", "_Param_wfs__npix", "_Param_wfs__pixsize",
        "_Param_wfs__fracsub", "_Param_wfs__xpos", "_Param_wfs__ypos",
        "_Param_wfs__Lambda", "_Param_wfs__dms_seen", "_Param_wfs__fssize",
        "_Param_wfs__fstop", "_Param_wfs__pyr_ampl", "_Param_wfs__pyr_loc",
        "_Param_wfs__pyr_npts", "_Param_wfs__pyrtype", "_Param_wfs__pyr_pup_sep",
        "ndms", "_Param_dm__type", "_Param_dm__alt", "_Param_dm__coupling",
        "_Param_dm__margin_in", "_Param_dm__margin_out", "_Param_dm__nkl",
        "_Param_dm__nact", "_Param_dm__type_kl", "_Param_dm__push4imat",
        "_Param_dm__thresh", "_Param_dm__unitpervolt"
    ]
    for i in dataBase.index:
        cc = 0
        commit = check_output(["git", "rev-parse", "--short", "HEAD"]).strip()
        if dataBase.loc[i, "validity"] and (dataBase.loc[i, "commit"] == commit):
            cond = True
            while cond:
                if cc >= len(param2test):
                    break
                cond = dataBase.loc[i, param2test[cc]] == pdict[param2test[cc]]
                if type(cond) is np.ndarray:
                    cond = cond.all()
                cc += 1
            if not cond:
                cc -= 1
                print(param2test[cc] + " has changed from ",
                      dataBase.loc[i, param2test[cc]], " to ", pdict[param2test[cc]])
        else:
            cond = False

        if cond:
            matricesToLoad["index_dms"] = i
            matricesToLoad["dm"] = dataBase.loc[i, "path2file"]
            return


def validDataBase(savepath, matricesToLoad):
    """ Flag the matrices computed during this run (those not loaded from the
    database) as valid in the database """
    store = pandas.HDFStore(savepath + "matricesDataBase.h5")
    if "A" not in matricesToLoad:
        validInStore(store, savepath, "A")
    if "dm" not in matricesToLoad:
        validInStore(store, savepath, "dm")
    if "imat" not in matricesToLoad:
        validInStore(store, savepath, "imat")
    store.close()


def validFile(filename):
    """ Set the validity attribute of an existing HDF5 file to True """
    f = h5py.File(filename, "r+")
    f.attrs["validity"] = True
    f.close()


def validInStore(store, savepath, matricetype):
    """ Set the validity flag of the last matricetype row to True """
    df = store[matricetype]
    ind = len(df.index) - 1
    df.loc[ind, "validity"] = True
    store[matricetype] = df
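

# Hedged sketch of the validation flow: once a computed matrix file has been
# written and the run completed, both the file attribute and the database row
# are flipped to valid (`filename` below is illustrative).
#
#     validFile(filename)
#     validDataBase(savepath, matricesToLoad)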


def configFromH5(filename, config):
    """ Read the simulation parameters stored in the header of an HDF5 file
    and fill the config module accordingly """
    f = h5py.File(filename, "r")

    config.simul_name = str(f.attrs.get("simulname"))
    # Loop
    config.p_loop.set_niter(f.attrs.get("niter"))
    config.p_loop.set_ittime(f.attrs.get("ittime"))

    # Geometry
    config.p_geom.set_zenithangle(f.attrs.get("zenithangle"))
    config.p_geom.set_pupdiam(f.attrs.get("pupdiam"))
    # Telescope
    config.p_tel.set_diam(f.attrs.get("tel_diam"))
    config.p_tel.set_cobs(f.attrs.get("cobs"))
    config.p_tel.set_nbrmissing(f.attrs.get("nbrmissing"))
    config.p_tel.set_t_spiders(f.attrs.get("t_spiders"))
    config.p_tel.set_type_ap(str(f.attrs.get("type_ap")))
    config.p_tel.set_spiders_type(str(f.attrs.get("spiders_type")))
    config.p_tel.set_pupangle(f.attrs.get("pupangle"))
    config.p_tel.set_referr(f.attrs.get("referr"))
    config.p_tel.set_std_piston(f.attrs.get("std_piston"))
    config.p_tel.set_std_tt(f.attrs.get("std_tt"))
    # Atmosphere
    config.p_atmos.set_r0(f.attrs.get("r0"))
    config.p_atmos.set_nscreens(f.attrs.get("nscreens"))
    config.p_atmos.set_frac(f.attrs.get("frac"))
    config.p_atmos.set_alt(f.attrs.get("atm.alt"))
    config.p_atmos.set_windspeed(f.attrs.get("windspeed"))
    config.p_atmos.set_winddir(f.attrs.get("winddir"))
    config.p_atmos.set_L0(f.attrs.get("L0"))
    config.p_atmos.set_seeds(f.attrs.get("seeds"))
    # Target
    config.p_target.set_nTargets(f.attrs.get("ntargets"))
    config.p_target.set_xpos(f.attrs.get("target.xpos"))
    config.p_target.set_ypos(f.attrs.get("target.ypos"))
    config.p_target.set_Lambda(f.attrs.get("target.Lambda"))
    config.p_target.set_mag(f.attrs.get("target.mag"))
    if f.attrs.get("target.dms_seen") > -1:
        config.p_target.set_dms_seen(f.attrs.get("target.dms_seen"))
    # WFSs
    config.p_wfss = []
    for i in range(f.attrs.get("nwfs")):
        config.p_wfss.append(ao.Param_wfs())
        config.p_wfss[i].set_type(str(f.attrs.get("type")[i]))
        config.p_wfss[i].set_nxsub(f.attrs.get("nxsub")[i])
        config.p_wfss[i].set_npix(f.attrs.get("npix")[i])
        config.p_wfss[i].set_pixsize(f.attrs.get("pixsize")[i])
        config.p_wfss[i].set_fracsub(f.attrs.get("fracsub")[i])
        config.p_wfss[i].set_xpos(f.attrs.get("wfs.xpos")[i])
        config.p_wfss[i].set_ypos(f.attrs.get("wfs.ypos")[i])
        config.p_wfss[i].set_Lambda(f.attrs.get("wfs.Lambda")[i])
        config.p_wfss[i].set_gsmag(f.attrs.get("gsmag")[i])
        config.p_wfss[i].set_optthroughput(f.attrs.get("optthroughput")[i])
        config.p_wfss[i].set_zerop(f.attrs.get("zerop")[i])
        config.p_wfss[i].set_noise(f.attrs.get("noise")[i])
        config.p_wfss[i].set_atmos_seen(f.attrs.get("atmos_seen")[i])
        config.p_wfss[i].set_fstop(str(f.attrs.get("fstop")[i]))
        config.p_wfss[i].set_pyr_npts(f.attrs.get("pyr_npts")[i])
        config.p_wfss[i].set_pyr_ampl(f.attrs.get("pyr_ampl")[i])
        config.p_wfss[i].set_pyrtype(str(f.attrs.get("pyrtype")[i]))
        config.p_wfss[i].set_pyr_loc(str(f.attrs.get("pyr_loc")[i]))
        config.p_wfss[i].set_fssize(f.attrs.get("fssize")[i])
        if (f.attrs.get("dms_seen")[i] > -1).all():
            config.p_wfss[i].set_dms_seen(f.attrs.get("dms_seen")[i])

        # LGS parameters
        config.p_wfss[i].set_gsalt(f.attrs.get("gsalt")[i])
        config.p_wfss[i].set_lltx(f.attrs.get("lltx")[i])
        config.p_wfss[i].set_llty(f.attrs.get("llty")[i])
        config.p_wfss[i].set_laserpower(f.attrs.get("laserpower")[i])
        config.p_wfss[i].set_lgsreturnperwatt(f.attrs.get("lgsreturnperwatt")[i])
        config.p_wfss[i].set_proftype(str(f.attrs.get("proftype")[i]))
        config.p_wfss[i].set_beamsize(f.attrs.get("beamsize")[i])
    # DMs
    config.p_dms = []
    if f.attrs.get("ndms"):
        for i in range(f.attrs.get("ndms")):
            config.p_dms.append(ao.Param_dm())
            config.p_dms[i].set_type(str(f.attrs.get("type")[i]))
            config.p_dms[i].set_nact(f.attrs.get("nact")[i])
            config.p_dms[i].set_alt(f.attrs.get("dm.alt")[i])
            config.p_dms[i].set_thresh(f.attrs.get("dm.thresh")[i])
            config.p_dms[i].set_coupling(f.attrs.get("coupling")[i])
            config.p_dms[i].set_unitpervolt(f.attrs.get("unitpervolt")[i])
            config.p_dms[i].set_push4imat(f.attrs.get("push4imat")[i])
    # Centroiders
    config.p_centroiders = []
    if f.attrs.get("ncentroiders"):
        for i in range(f.attrs.get("ncentroiders")):
            config.p_centroiders.append(ao.Param_centroider())
            config.p_centroiders[i].set_nwfs(f.attrs.get("centro.nwfs")[i])
            config.p_centroiders[i].set_type(str(f.attrs.get("type")[i]))
            config.p_centroiders[i].set_type_fct(str(f.attrs.get("type_fct")[i]))
            config.p_centroiders[i].set_nmax(f.attrs.get("nmax")[i])
            config.p_centroiders[i].set_thresh(f.attrs.get("centroider.thresh")[i])
            if f.attrs.get("weights")[i]:
                config.p_centroiders[i].set_weights(f.attrs.get("weights")[i])
            config.p_centroiders[i].set_width(f.attrs.get("width")[i])
    config.p_rtc.set_centroiders(config.p_centroiders)
    # Controllers
    config.p_controllers = []
    if f.attrs.get("ncontrollers"):
        for i in range(f.attrs.get("ncontrollers")):
            config.p_controllers.append(ao.Param_controller())
            config.p_controllers[i].set_type(str(f.attrs.get("type")[i]))
            config.p_controllers[i].set_nwfs(f.attrs.get("control.nwfs")[i])
            config.p_controllers[i].set_ndm(f.attrs.get("ndm")[i])
            config.p_controllers[i].set_maxcond(f.attrs.get("maxcond")[i])
            config.p_controllers[i].set_delay(f.attrs.get("delay")[i])
            config.p_controllers[i].set_gain(f.attrs.get("gain")[i])
            config.p_controllers[i].set_modopti(f.attrs.get("modopti")[i])
            config.p_controllers[i].set_nrec(f.attrs.get("nrec")[i])
            config.p_controllers[i].set_nmodes(f.attrs.get("nmodes")[i])
            config.p_controllers[i].set_gmin(f.attrs.get("gmin")[i])
            config.p_controllers[i].set_gmax(f.attrs.get("gmax")[i])
            config.p_controllers[i].set_ngain(f.attrs.get("ngain")[i])
            config.p_controllers[i].set_TTcond(f.attrs.get("TTcond")[i])
            config.p_controllers[i].set_cured_ndivs(f.attrs.get("cured_ndivs")[i])
    config.p_rtc.set_controllers(config.p_controllers)
    config.p_rtc.set_nwfs(f.attrs.get("nwfs"))

    f.close()
    print("Parameters have been read from", filename, "header")


def writeHdf5SingleDataset(filename, data, datasetName="dataset"):
    """ Write a hdf5 file containing a single field

    If the file already exists, it will be overwritten

    filename: (str) : name of the file to write

    data: (np.ndarray) : content of the file

    datasetName: (str) : name of the dataset to write (default="dataset")
    """
    f = h5py.File(filename, "w")
    f.create_dataset(datasetName, data=data)
    f.close()


def readHdf5SingleDataset(filename, datasetName="dataset"):
    """ Read a single dataset from an hdf5 file

    filename: (str) : name of the file to read from

    datasetName: (str) : name of the dataset to read (default="dataset")
    """
    f = h5py.File(filename, "r")
    data = f[datasetName][:]
    f.close()
    return data
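

# --- Hedged round-trip sketch ------------------------------------------------
# Write a single array then read it back; the path is illustrative.
def _demo_single_dataset_roundtrip(tmpdir="/tmp"):
    fname = os.path.join(tmpdir, "demo_dataset.h5")
    writeHdf5SingleDataset(fname, np.arange(10))
    assert (readHdf5SingleDataset(fname) == np.arange(10)).all()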


def load_AB_from_dataBase(database, ind):
    """ Read and return A, B, istx and isty from the database

    database: (dict): dictionary containing paths to matrices to load

    ind: (int): layer index
    """
    print("loading", database["A"])
    f = h5py.File(database["A"], 'r')
    A = f["A_" + str(ind)][:]
    B = f["B_" + str(ind)][:]
    istx = f["istx_" + str(ind)][:]
    isty = f["isty_" + str(ind)][:]
    f.close()

    return A, B, istx, isty
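

# Hedged usage sketch: `matricesToLoad` is the dict returned by
# checkMatricesDataBase, whose "A" entry points to a turbu/A_*.h5 file.
#
#     A, B, istx, isty = load_AB_from_dataBase(matricesToLoad, ind=0)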


def save_AB_in_database(k, A, B, istx, isty):
    """ Save A, B, istx and isty in the database
    """
    commit = check_output(["git", "rev-parse", "--short", "HEAD"]).decode('utf8').strip()
    print("writing files and updating database")
    df = pandas.read_hdf(
        os.getenv('SHESHA_ROOT') + "/data/dataBase/matricesDataBase.h5", "A")
    ind = len(df.index) - 1
    savename = os.getenv('SHESHA_ROOT') + "/data/dataBase/turbu/A_" + \
        commit + "_" + str(ind) + ".h5"
    save_hdf5(savename, "A_" + str(k), A)
    save_hdf5(savename, "B_" + str(k), B)
    save_hdf5(savename, "istx_" + str(k), istx)
    save_hdf5(savename, "isty_" + str(k), isty)


def load_dm_geom_from_dataBase(database, ndm):
    """ Read and return the DM geometry

    database: (dict): dictionary containing paths to matrices to load

    ndm: (int): DM index
    """
    print("loading", database["dm"])
    f = h5py.File(database["dm"], 'r')
    influpos = f["influpos_" + str(ndm)][:]
    ninflu = f["ninflu_" + str(ndm)][:]
    influstart = f["influstart_" + str(ndm)][:]
    i1 = f["i1_" + str(ndm)][:]
    j1 = f["j1_" + str(ndm)][:]
    ok = f["ok_" + str(ndm)][:]
    f.close()

    return influpos, ninflu, influstart, i1, j1, ok


def save_dm_geom_in_dataBase(ndm, influpos, ninflu, influstart, i1, j1, ok):
    """ Save the DM geometry in the database
    """
    commit = check_output(["git", "rev-parse", "--short", "HEAD"]).decode('utf8').strip()
    print("writing files and updating database")
    df = pandas.read_hdf(
        os.getenv('SHESHA_ROOT') + "/data/dataBase/matricesDataBase.h5", "dm")
    ind = len(df.index) - 1
    savename = os.getenv('SHESHA_ROOT') + "/data/dataBase/mat/dm_" + \
        commit + "_" + str(ind) + ".h5"
    save_hdf5(savename, "influpos_" + str(ndm), influpos)
    save_hdf5(savename, "ninflu_" + str(ndm), ninflu)
    save_hdf5(savename, "influstart_" + str(ndm), influstart)
    save_hdf5(savename, "i1_" + str(ndm), i1)
    save_hdf5(savename, "j1_" + str(ndm), j1)
    save_hdf5(savename, "ok_" + str(ndm), ok)


def load_imat_from_dataBase(database):
    """ Read and return the imat

    database: (dict): dictionary containing paths to matrices to load
    """
    print("loading", database["imat"])
    f = h5py.File(database["imat"], 'r')
    imat = f["imat"][:]
    f.close()

    return imat


def save_imat_in_dataBase(imat):
    """ Save the imat in the database

    imat: (np.ndarray): imat to save
    """
    commit = check_output(["git", "rev-parse", "--short", "HEAD"]).decode('utf8').strip()
    print("writing files and updating database")
    df = pandas.read_hdf(
        os.getenv('SHESHA_ROOT') + "/data/dataBase/matricesDataBase.h5", "imat")
    ind = len(df.index) - 1
    savename = os.getenv('SHESHA_ROOT') + "/data/dataBase/mat/imat_" + \
        commit + "_" + str(ind) + ".h5"
    save_hdf5(savename, "imat", imat)