COMPASS 5.0.0
End-to-end AO simulation tool using GPU acceleration
drax.py
1 """
2 DRAX (Dedicated functions for Roket file Analysis and eXploitation)
3 
4 Useful functions for ROKET file exploitation
5 """
6 
7 import numpy as np
8 import h5py
9 import pandas
10 import matplotlib.pyplot as plt
11 plt.ion()
12 from scipy.sparse import csr_matrix
13 
14 
15 def variance(f, contributors, method="Default"):
16  """ Return the error variance of specified contributors
17  params:
18  f : (h5py.File) : roket hdf5 file opened with h5py
19  contributors : (list of string) : list of the contributors
20  method : (optional, default="Default") : if "Independence", the
21  function returns the sum of the contributors' variances.
22  If "Default", it returns the variance of the contributors' sum.
23  """
24  P = f["P"][:]
25  nmodes = P.shape[0]
26  swap = np.arange(nmodes) - 2
27  swap[0:2] = [nmodes - 2, nmodes - 1]
28  if (method == "Default"):
29  err = f[contributors[0]][:] * 0.
30  for c in contributors:
31  err += f[c][:]
32  return np.var(P.dot(err), axis=1) #[swap]
33 
34  elif (method == "Independence"):
35  nmodes = P.shape[0]
36  v = np.zeros(nmodes)
37  for c in contributors:
38  v += np.var(P.dot(f[c][:]), axis=1)
39  return v #[swap]
40 
41  else:
42  raise TypeError("Wrong method input")
43 
44 
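A minimal usage sketch for variance(), assuming the module is importable as guardians.drax and a ROKET output file exists at the hypothetical path "roket_output.h5":

import h5py

from guardians import drax  # assumed import path

with h5py.File("roket_output.h5", "r") as f:  # hypothetical ROKET file
    v_sum = drax.variance(f, ["noise", "bandwidth"])  # variance of the summed error
    v_ind = drax.variance(f, ["noise", "bandwidth"], method="Independence")  # sum of individual variances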
45 def varianceMultiFiles(fs, frac_per_layer, contributors):
46  """ Return the variance computed from the sum of contributors of roket
47  files fs, weighted by the layer fractions frac_per_layer
48  params:
49  fs : (list) : list of hdf5 files opened with h5py
50  frac_per_layer : (dict) : frac for each layer
51  contributors : (list of string) : list of the contributors
52  return:
53  v : (np.array(dim=1)) : variance vector
54  """
55  f = fs[0]
56  P = f["P"][:]
57  nmodes = P.shape[0]
58  swap = np.arange(nmodes) - 2
59  swap[0:2] = [nmodes - 2, nmodes - 1]
60  err = f[contributors[0]][:] * 0.
61  for f in fs:
62  frac = frac_per_layer[f.attrs["_Param_atmos__alt"][0]]
63  for c in contributors:
64  err += np.sqrt(frac) * f[c][:]
65 
66  return np.var(P.dot(err), axis=1) #[swap]
67 
68 
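A sketch of varianceMultiFiles() applied to per-layer ROKET runs; the file names, altitudes and fractions below are purely illustrative:

import h5py

from guardians import drax  # assumed import path

# One ROKET file per simulated layer (hypothetical paths)
fs = [h5py.File("roket_layer0.h5", "r"), h5py.File("roket_layer1.h5", "r")]
# Turbulence fraction of each layer, keyed by its altitude as stored in the file attributes
frac_per_layer = {0.0: 0.7, 5000.0: 0.3}
v = drax.varianceMultiFiles(fs, frac_per_layer, ["tomography", "bandwidth"])
for f in fs:
    f.close()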
69 def cumulativeSR(v, Lambda_tar):
70  """ Returns the cumulative Strehl ratio over the modes from the variance
71  on each mode
72  params:
73  v : (np.array(dim=1)) : variance vector
74  return:
75  s : (np.array(dim=1)) : cumulative SR
76  """
77  s = np.cumsum(v)
78  s = np.exp(-s * (2 * np.pi / Lambda_tar)**2)
79 
80  return s
81 
82 
83 def get_cumSR(filename):
84  """
85  Compute the SR over the modes from the variance
86  on each mode
87 
88  :parameters:
89  filename: (str): path to the ROKET file
90  """
91  f = h5py.File(filename, 'r')
92  error_list = [
93  "noise", "aliasing", "tomography", "filtered modes", "non linearity",
94  "bandwidth"
95  ]
96  if (list(f.attrs.keys()).count("_Param_target__Lambda")):
97  Lambda = f.attrs["_Param_target__Lambda"][0]
98  else:
99  Lambda = 1.65
100  nactus = f["noise"][:].shape[0]
101  niter = f["noise"][:].shape[1]
102  P = f["P"][:]
103  nmodes = P.shape[0]
104  swap = np.arange(nmodes) - 2
105  swap[0:2] = [nmodes - 2, nmodes - 1]
106  data = np.zeros((nmodes, niter))
107  data2 = np.zeros(nmodes)
108 
109  for i in error_list:
110  data += np.dot(P, f[i][:])
111  data2 += np.var(np.dot(P, f[i][:]), axis=1)
112 
113  data = np.var(data, axis=1)
114  data = np.cumsum(data[swap])
115  data = np.exp(-data * (2 * np.pi / Lambda)**2)
116  data2 = np.cumsum(data2[swap])
117  data2 = np.exp(-data2 * (2 * np.pi / Lambda)**2)
118  data *= np.exp(-f["fitting"].value)
119  data2 *= np.exp(-f["fitting"].value)
120 
121  SR2 = np.ones(nmodes) * f["SR2"].value
122  SR = np.ones(nmodes) * f["SR"].value
123 
124  return data, data2, SR, SR2
125 
126 
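For instance, the curves returned by get_cumSR() can be plotted against the Strehl ratios stored in the file (hypothetical path, assumed import path guardians.drax):

import matplotlib.pyplot as plt

from guardians import drax  # assumed import path

data, data2, SR, SR2 = drax.get_cumSR("roket_output.h5")  # hypothetical ROKET file
plt.figure()
plt.plot(data, label="cumulative SR (summed contributors)")
plt.plot(data2, label="cumulative SR (independence assumption)")
plt.plot(SR, label="SR")
plt.plot(SR2, label="SR2")
plt.xlabel("Modes")
plt.ylabel("Strehl ratio")
plt.legend()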
127 def get_Btt(filename):
128  """
129  Return the Modes to Volt matrix
130  :parameters:
131  filename: (str): path to the ROKET file
132  """
133  f = h5py.File(filename, 'r')
134  return f["Btt"][:]
135 
136 
137 def get_P(filename):
138  """
139  Return the Volt to Modes matrix
140  :parameters:
141  filename: (str): path to the ROKET file
142  """
143  f = h5py.File(filename, 'r')
144  return f["P"][:]
145 
146 
147 def get_contribution(filename, contributor):
148  """
149  Return the variance of an error contributor
150 
151  :parameters:
152  filename: (str): path to the ROKET file
153  contributor: (str): contributor name
154  :return:
155  v: (np.array[ndim=1, dtype=np.float32]): variance of the contributor
156  """
157  f = h5py.File(filename, 'r')
158  P = f["P"][:]
159  nmodes = P.shape[0]
160  swap = np.arange(nmodes) - 2
161  swap[0:2] = [nmodes - 2, nmodes - 1]
162 
163  return np.var(np.dot(P, f[contributor][:]), axis=1) #[swap]
164 
165 
166 def get_err_contributors(filename, contributors):
167  """
168  Return the sum of the specified contributors' error buffers
169 
170  :parameters:
171  filename: (str): path to the ROKET file
172  contributors: (list): list of contributors
173  :return:
174  err: (np.ndarray[ndim=2,dtype=np.float32]): Sum of the error buffers
175  """
176  f = h5py.File(filename, 'r')
177  # Get the sum of error contributors
178  err = f["noise"][:] * 0.
179  for c in contributors:
180  err += f[c][:]
181  f.close()
182 
183  return err
184 
185 
186 def get_err(filename):
187  """
188  Return the sum of all the error buffers
189 
190  :parameters:
191  filename: (str): path to the ROKET file
192  :return:
193  err: (np.ndarray[ndim=2,dtype=np.float32]): Sum of the error buffers
194  """
195 
196  f = h5py.File(filename, 'r')
197  # Get the sum of error contributors
198  err = f["noise"][:]
199  err += f["aliasing"][:]
200  err += f["tomography"][:]
201  err += f["filtered modes"][:]
202  err += f["non linearity"][:]
203  err += f["bandwidth"][:]
204  f.close()
205 
206  return err
207 
208 
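The summed error buffer is expressed in the actuator space; it can be projected onto the modal basis with the P matrix, e.g. (hypothetical file path, assumed import path):

import numpy as np

from guardians import drax  # assumed import path

err = drax.get_err("roket_output.h5")   # (nactus x niter) summed error buffer
P = drax.get_P("roket_output.h5")       # volts-to-modes projection matrix
modal_var = np.var(P.dot(err), axis=1)  # residual variance per mode [microns^2]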
209 def get_coverr_independence(filename):
210  """
211  Return the error covariance matrix considering statistical independence between contributors
212 
213  :parameters:
214  filename: (str): path to the ROKET file
215  :return:
216  err: (np.ndarray[ndim=2,dtype=np.float32]): Covariance matrix
217  """
218 
219  f = h5py.File(filename, 'r')
220  # Sum the covariance of each contributor (independence assumption)
221  N = f["noise"][:].shape[1]
222  err = f["noise"][:].dot(f["noise"][:].T)
223  err += f["aliasing"][:].dot(f["aliasing"][:].T)
224  err += f["tomography"][:].dot(f["tomography"][:].T)
225  err += f["filtered modes"][:].dot(f["filtered modes"][:].T)
226  err += f["non linearity"][:].dot(f["non linearity"][:].T)
227  err += f["bandwidth"][:].dot(f["bandwidth"][:].T)
228  f.close()
229 
230  return err / N
231 
232 
233 def get_coverr_independence_contributors(filename, contributors):
234  """
235  Return the error covariance matrix considering statistical independence between specified contributors
236 
237  :parameters:
238  filename: (str): path to the ROKET file
239  contributors: (list): list of contributors
240  :return:
241  err: (np.ndarray[ndim=2,dtype=np.float32]): Covariance matrix
242  """
243 
244  f = h5py.File(filename, 'r')
245  # Sum the covariance of each specified contributor (independence assumption)
246  N = f["noise"][:].shape[1]
247  err = np.zeros((f["noise"][:].shape[0], f["noise"][:].shape[0]))
248  for c in contributors:
249  err += f[c][:].dot(f[c][:].T)
250 
251  f.close()
252 
253  return err / N
254 
255 
256 def get_covmat_contrib(filename, contributors, modal=True):
257  """
258  Return the covariance matrix of the specified contributors
259 
260  :parameters:
261  filename: (str): path to the ROKET file
262  contributors: (list): names of the contributors of the ROKET file
263  modal: (bool): if True (default), return the matrix expressed in the modal basis
264  :return:
265  covmat: (np.ndarray(ndim=2, dtype=np.float32)): covariance matrix
266  """
267  h5f = h5py.File(filename, 'r')
268  contrib = h5f["bandwidth"][:] * 0.
269  for c in contributors:
270  contrib += h5f[c][:]
271  covmat = contrib.dot(contrib.T) / contrib.shape[1]
272  if modal:
273  P = h5f["P"][:]
274  covmat = P.dot(covmat).dot(P.T)
275  h5f.close()
276 
277  return covmat
278 
279 
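For example, the modal covariance matrix of a few contributors can be displayed directly (hypothetical file path, assumed import path):

import matplotlib.pyplot as plt

from guardians import drax  # assumed import path

covmat = drax.get_covmat_contrib("roket_output.h5", ["tomography", "bandwidth"])
plt.matshow(covmat)
plt.colorbar()
plt.title("Modal covariance of tomography + bandwidth errors")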
280 def get_pup(filename):
281  """
282  Return the pupil saved in a ROKET file
283  :parameters:
284  filename: (str): path to the ROKET file
285  :return:
286  spup: (np.ndarray[ndim=2,dtype=np.float32]): pupil
287 
288  """
289  f = h5py.File(filename, 'r')
290  if (list(f.keys()).count("spup")):
291  spup = f["spup"][:]
292  else:
293  indx_pup = f["indx_pup"][:]
294  pup = np.zeros((f["dm_dim"].value, f["dm_dim"].value))
295  pup_F = pup.flatten()
296  pup_F[indx_pup] = 1.
297  pup = pup_F.reshape(pup.shape)
298  spup = pup[np.where(pup)[0].min():np.where(pup)[0].max() + 1,
299  np.where(pup)[1].min():np.where(pup)[1].max() + 1]
300 
301  f.close()
302  return spup
303 
304 
305 def get_breakdown(filename):
306  """
307  Computes the error breakdown in nm rms from a ROKET file
308 
309  :parameters:
310  filename: (str): path to the ROKET file
311  :return:
312  breakdown: (dict): dictionary containing the error breakdown
313  """
314  f = h5py.File(filename, 'r')
315  P = f["P"][:]
316  noise = f["noise"][:]
317  trunc = f["non linearity"][:]
318  bp = f["bandwidth"][:]
319  tomo = f["tomography"][:]
320  aliasing = f["aliasing"][:]
321  filt = f["filtered modes"][:]
322  nmodes = P.shape[0]
323  swap = np.arange(nmodes) - 2
324  swap[0:2] = [nmodes - 2, nmodes - 1]
325  N = np.var(P.dot(noise), axis=1)
326  S = np.var(P.dot(trunc), axis=1)
327  B = np.var(P.dot(bp), axis=1)
328  T = np.var(P.dot(tomo), axis=1)
329  A = np.var(P.dot(aliasing), axis=1)
330  F = np.var(P.dot(filt), axis=1)
331  C = np.var(P.dot(filt + noise + trunc + bp + tomo + aliasing), axis=1)
332  inde = N + S + B + T + A + F
333 
334  if (list(f.attrs.keys()).count("_Param_target__Lambda")):
335  Lambda = f.attrs["_Param_target__Lambda"][0]
336  else:
337  Lambda = 1.65
338 
339  print("noise :", np.sqrt(np.sum(N)) * 1e3, " nm rms")
340  print("trunc :", np.sqrt(np.sum(S)) * 1e3, " nm rms")
341  print("bp :", np.sqrt(np.sum(B)) * 1e3, " nm rms")
342  print("tomo :", np.sqrt(np.sum(T)) * 1e3, " nm rms")
343  print("aliasing :", np.sqrt(np.sum(A)) * 1e3, " nm rms")
344  print("filt :", np.sqrt(np.sum(F)) * 1e3, " nm rms")
345  print("fitting :",
346  np.mean(np.sqrt(f["fitting"].value / ((2 * np.pi / Lambda)**2)) * 1e3),
347  " nm rms")
348  print("cross-terms :", np.sqrt(np.abs(np.sum(C) - np.sum(inde))) * 1e3, " nm rms")
349  return {
350  "noise":
351  np.sqrt(np.sum(N)) * 1e3,
352  "non linearity":
353  np.sqrt(np.sum(S)) * 1e3,
354  "bandwidth":
355  np.sqrt(np.sum(B)) * 1e3,
356  "tomography":
357  np.sqrt(np.sum(T)) * 1e3,
358  "aliasing":
359  np.sqrt(np.sum(A)) * 1e3,
360  "filtered modes":
361  np.sqrt(np.sum(F)) * 1e3,
362  "fitting":
363  np.mean(
364  np.sqrt(f["fitting"].value /
365  ((2 * np.pi / Lambda)**2)) * 1e3)
366  }
367 
368 
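A sketch of how the returned dictionary can be combined; note that the quadratic sum below ignores the cross-terms that get_breakdown() prints separately (hypothetical file path, assumed import path):

import numpy as np

from guardians import drax  # assumed import path

breakdown = drax.get_breakdown("roket_output.h5")
total = np.sqrt(sum(v**2 for v in breakdown.values()))  # quadratic sum, cross-terms ignored
print("Total residual (no cross-terms): %.1f nm rms" % total)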
369 # def plotContributions(filename):
370 # f = h5py.File(filename, 'r')
371 # P = f["P"][:]
372 # noise = f["noise"][:]
373 # trunc = f["non linearity"][:]
374 # bp = f["bandwidth"][:]
375 # tomo = f["tomography"][:]
376 # aliasing = f["aliasing"][:]
377 # filt = f["filtered modes"][:]
378 # nmodes = P.shape[0]
379 # swap = np.arange(nmodes) - 2
380 # swap[0:2] = [nmodes - 2, nmodes - 1]
381 
382 # plt.figure()
383 # plt.plot(np.var(noise, axis=1), color="black")
384 # plt.plot(np.var(trunc, axis=1), color="green")
385 # plt.plot(np.var(bp, axis=1), color="red")
386 # plt.plot(np.var(tomo, axis=1), color="blue")
387 # plt.plot(np.var(aliasing, axis=1), color="cyan")
388 # plt.plot(np.var(filt, axis=1), color="magenta")
389 # plt.xlabel("Actuators")
390 # plt.ylabel("Variance [microns^2]")
391 # plt.title("Variance of estimated errors on actuators")
392 # plt.legend([
393 # "noise", "WFS non-linearity", "Bandwidth", "Anisoplanatism", "Aliasing",
394 # "Filtered modes"
395 # ])
396 
397 # plt.figure()
398 # N = np.var(P.dot(noise), axis=1)
399 # S = np.var(P.dot(trunc), axis=1)
400 # B = np.var(P.dot(bp), axis=1)
401 # T = np.var(P.dot(tomo), axis=1)
402 # A = np.var(P.dot(aliasing), axis=1)
403 # F = np.var(P.dot(filt), axis=1)
404 # plt.plot(N[swap], color="black")
405 # plt.plot(S[swap], color="green")
406 # plt.plot(B[swap], color="red")
407 # plt.plot(T[swap], color="blue")
408 # plt.plot(A[swap], color="cyan")
409 # plt.plot(F[swap], color="magenta")
410 # plt.xlabel("Modes")
411 # plt.ylabel("Variance [microns^2]")
412 # plt.yscale("log")
413 # plt.title("Variance of estimated errors on modal basis B")
414 
415 # if (list(f.attrs.keys()).count("_Param_target__Lambda")):
416 # Lambda = f.attrs["_Param_target__Lambda"][0]
417 # else:
418 # Lambda = 1.65
419 
420 # print("noise :",
421 # np.sqrt(np.sum(N)) * 1e3, " nm, ", "SR : ",
422 # np.exp(-np.sum(N) * (2 * np.pi / Lambda)**2))
423 # print("trunc :",
424 # np.sqrt(np.sum(S)) * 1e3, " nm, ", "SR : ",
425 # np.exp(-np.sum(S) * (2 * np.pi / Lambda)**2))
426 # print("bp :",
427 # np.sqrt(np.sum(B)) * 1e3, " nm, ", "SR : ",
428 # np.exp(-np.sum(B) * (2 * np.pi / Lambda)**2))
429 # print("tomo :",
430 # np.sqrt(np.sum(T)) * 1e3, " nm, ", "SR : ",
431 # np.exp(-np.sum(T) * (2 * np.pi / Lambda)**2))
432 # print("aliasing :",
433 # np.sqrt(np.sum(A)) * 1e3, " nm, ", "SR : ",
434 # np.exp(-np.sum(A) * (2 * np.pi / Lambda)**2))
435 # print("filt :",
436 # np.sqrt(np.sum(F)) * 1e3, " nm, ", "SR : ",
437 # np.exp(-np.sum(F) * (2 * np.pi / Lambda)**2))
438 # print("fitting :",
439 # np.sqrt(f["fitting"].value / ((2 * np.pi / Lambda)**2)) * 1e3, " nm, ",
440 # "SR : ", np.exp(-f["fitting"].value))
441 # #plt.legend(["noise","WFS non-linearity","Bandwidth","Anisoplanatism","Aliasing","Filtered modes"])
442 
443 
444 def plotCovCor(filename, maparico=None):
445  """
446  Displays the covariance and correlation matrices between the contributors
447  :parameters:
448  filename: (str): path to the ROKET file
449  maparico: (str): (optional) matplotlib colormap to use
450 
451  """
452  f = h5py.File(filename, 'r')
453  cov = f["cov"][:]
454  cor = f["cor"][:]
455 
456  labels = ["noise", "WF deviation", "aliasing", "filt. modes", "bandwidth", "aniso"]
457  if (maparico is None):
458  maparico = "viridis"
459  x = np.arange(6)
460  plt.matshow(cov, cmap=maparico)
461  plt.colorbar()
462  plt.xticks(x, labels, rotation="vertical")
463  plt.yticks(x, labels)
464 
465  plt.matshow(cor, cmap=maparico)
466  plt.colorbar()
467  plt.xticks(x, labels, rotation="vertical")
468  plt.yticks(x, labels)
469  print("Total variance : ", cov.sum(), " microns^2")
470 
471 
472 def get_IF(filename):
473  """
474  Return the influence functions of the pzt and tt DM saved in a ROKET file
475  :parameters:
476  filename: (str): path to the ROKET file
477  :return:
478  IF: (csr_matrix): pzt influence function (sparse)
479  T: (np.ndarray[ndim=2,dtype=np.float32]): tip tilt influence function
480  """
481  f = h5py.File(filename, 'r')
482  IF = csr_matrix((f["IF.data"][:], f["IF.indices"][:], f["IF.indptr"][:]))
483  if (list(f.keys()).count("TT")):
484  T = f["TT"][:]
485  else:
486  T = IF[-2:, :].toarray()
487  IF = IF[:-2, :]
488  f.close()
489  return IF, T.T.astype(np.float32)
490 
491 
492 def get_mode(filename, n):
493  """
494  Return mode #n of the Btt modal basis contained in a ROKET file
495  :parameters:
496  filename: (str): path to the ROKET file
497  n: (int): mode number
498  :return:
499  sc: (np.ndarray[ndim=2,dtype=np.float32]): mode #n of the Btt basis
500  """
501  f = h5py.File(filename, 'r')
502  Btt = f["Btt"][:]
503  IF, TT = get_IF(filename)
504  dim = f["dm_dim"].value
505  indx = f["indx_pup"][:]
506  sc = np.zeros((dim, dim))
507  sc = sc.flatten()
508  mode = IF.T.dot(Btt[:-2, n])
509  mode += TT.T.dot(Btt[-2:, n])
510  sc[indx] = mode
511 
512  return sc.reshape((dim, dim))
513 
514 
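For example, a single mode of the Btt basis can be displayed on the pupil support (hypothetical file path, arbitrary mode index, assumed import path):

import matplotlib.pyplot as plt

from guardians import drax  # assumed import path

mode = drax.get_mode("roket_output.h5", 10)
plt.matshow(mode)
plt.colorbar()
plt.title("Btt mode #10")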
515 def get_tar_image(filename):
516  """
517  Return the PSF computed by COMPASS saved in the ROKET file
518 
519  :parameters:
520  filename: (str): path to the ROKET file
521  :return:
522  psf: (np.ndarray[ndim=2,dtype=np.float32]): PSF computed by COMPASS
523  """
524  f = h5py.File(filename, "r")
525  psf = f["psf"][:]
526  f.close()
527 
528  return psf
529 
530 
531 def getMap(filename, covmat):
532  """
533  Return the spatial representation of a covariance matrix expressed in the DM space
534  :parameters:
535  filename: (str): path to the ROKET file
536  covmat: (np.ndarray[ndim=2,dtype=np.float32]): covariance matrix
537  :return:
538  Map: (np.ndarray[ndim=2,dtype=np.float32]): covariance map
539  """
540  f = h5py.File(filename, 'r')
541  # nn is essentially a where(actuators == valid)
542  xpos = f["dm.xpos"][:]
543  ypos = f["dm.ypos"][:]
544  pitch = xpos[1] - xpos[0]
545  nact = f.attrs["_Param_dm__nact"][0]
546  x = ((xpos - xpos.min()) / pitch).astype(np.int32)
547  y = ((ypos - ypos.min()) / pitch).astype(np.int32)
548  nn = (x, y)
549 
550  # build the array of offsets
551  # xx and yy are the coordinates of the valid actuators
552  # dx and dy are the matrices of coordinate differences, between -nssp and +nssp
553  xx = np.tile(np.arange(nact), (nact, 1))
554  yy = xx.T
555  dx = np.zeros((x.size, x.size), dtype=np.int32)
556  dy = dx.copy()
557  for k in range(x.size):
558  dx[k, :] = xx[nn][k] - xx[nn]
559  dy[k, :] = yy[nn][k] - yy[nn]
560 
561  # convert the offsets into array indices
562  dx += (nact - 1)
563  dy += (nact - 1)
564 
565  # convert an offset pair (dx, dy) into an index of the 'Map' array
566  Map = np.zeros((nact * 2 - 1, nact * 2 - 1)).flatten()
567  div = Map.copy()
568  ind = dy.flatten() + (nact * 2 - 1) * (dx.flatten())
569  Cf = covmat.flatten()
570  for k in range(ind.size):
571  Map[ind[k]] += Cf[k]
572  div[ind[k]] += 1
573 
574  div[np.where(div == 0)] = 1
575  Map /= div
576 
577  return Map.reshape((nact * 2 - 1, nact * 2 - 1))
578 
579 
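A sketch of how getMap() can be fed with a covariance matrix built from an error buffer; it assumes the last two rows of the buffer are the tip-tilt DM commands and must be excluded to match the pzt actuator grid (hypothetical file path, assumed import path):

import matplotlib.pyplot as plt

from guardians import drax  # assumed import path

err = drax.get_err_contributors("roket_output.h5", ["tomography"])
covmat = err[:-2, :].dot(err[:-2, :].T) / err.shape[1]  # pzt actuators only (assumption)
Map = drax.getMap("roket_output.h5", covmat)
plt.matshow(Map)
plt.colorbar()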
580 def SlopesMap(covmat, filename=None, nssp=None, validint=None):
581  """
582  Return a part of the spatial representation of a covariance matrix expressed in the slopes space.
583  Needs to be called 4 times to get the full map (XX, YY, XY, YX)
584 
585  :parameters:
586  covmat: (np.ndarray[ndim=2,dtype=np.float32]): part of the covariance matrix
587  filename: (str): (optional) path to the ROKET file
588  nssp: (int): (optional) Number of ssp in the diameter
589  validint: (float): (optional) Central obstruction as a ratio of D
590 
591  :return:
592  Map: (np.ndarray[ndim=2,dtype=np.float32]): covariance map
593  """
594  if filename is not None:
595  f = h5py.File(filename, 'r')
596  nssp = f.attrs["_Param_wfs__nxsub"][0]
597  validint = f.attrs["_Param_tel__cobs"]
598  f.close()
599 
600  if nssp is None or validint is None:
601  raise ValueError("nssp and validint not defined")
602 
603  nsub = covmat.shape[0]
604  x = np.linspace(-1, 1, nssp)
605  x, y = np.meshgrid(x, x)
606  r = np.sqrt(x * x + y * y)
607 
608  rorder = np.sort(r.reshape(nssp * nssp))
609  ncentral = nssp * nssp - np.sum(r >= validint, dtype=np.int32)
610  validext = rorder[ncentral + nsub]
611  valid = (r < validext) & (r >= validint)
612  nn = np.where(valid)
613 
614  xx = np.tile(np.arange(nssp), (nssp, 1))
615  yy = xx.T
616  xx = xx[nn]
617  yy = yy[nn]
618  dx = np.zeros((xx.size, xx.size), dtype=np.int32)
619  dy = dx.copy()
620  for k in range(xx.size):
621  dx[k, :] = xx[k] - xx
622  dy[k, :] = yy[k] - yy
623 
624  # convert the offsets into array indices
625  dx += (nssp - 1)
626  dy += (nssp - 1)
627 
628  # convert an offset pair (dx, dy) into an index of the 'Map' array
629  Map = np.zeros((nssp * 2 - 1, nssp * 2 - 1)).flatten()
630  div = Map.copy()
631  ind = dy.flatten() + (nssp * 2 - 1) * (dx.flatten())
632  Cf = covmat.flatten()
633  for k in range(ind.size):
634  Map[ind[k]] += Cf[k]
635  div[ind[k]] += 1
636 
637  div[np.where(div == 0)] = 1
638  Map /= div
639 
640  return Map.reshape((nssp * 2 - 1, nssp * 2 - 1))
641 
642 
643 def covFromMap(Map, nsub, filename=None, nssp=None, validint=None):
644  """
645  Reconstruct a part of the covariance matrix in the slopes space from its spatial map representation.
646  Needs to be called 4 times to get the full matrix (XX, YY, XY, YX)
647 
648  :parameters:
649  Map: (np.ndarray[ndim=2,dtype=np.float32]): part of the covariance map
650  nsub: (int): size of the covariance matrix block to reconstruct
651  filename: (str): (optional) path to the ROKET file
652  nssp: (int): (optional) Number of ssp in the diameter
653  validint: (float): (optional) Central obstruction as a ratio of D
654  :return:
655  covmat: (np.ndarray[ndim=2,dtype=np.float32]): reconstructed part of the covariance matrix
656  """
657  if filename is not None:
658  f = h5py.File(filename, 'r')
659  nssp = f.attrs["_Param_wfs__nxsub"][0]
660  validint = f.attrs["_Param_tel__cobs"]
661  f.close()
662 
663  if nssp is None or validint is None:
664  raise ValueError("nssp and validint not defined")
665 
666  x = np.linspace(-1, 1, nssp)
667  x, y = np.meshgrid(x, x)
668  r = np.sqrt(x * x + y * y)
669 
670  rorder = np.sort(r.reshape(nssp * nssp))
671  ncentral = nssp * nssp - np.sum(r >= validint, dtype=np.int32)
672  validext = rorder[ncentral + nsub]
673  valid = (r < validext) & (r >= validint)
674  nn = np.where(valid)
675 
676  xx = np.tile(np.arange(nssp), (nssp, 1))
677  yy = xx.T
678  xx = xx[nn]
679  yy = yy[nn]
680  dx = np.zeros((xx.size, xx.size), dtype=np.int32)
681  dy = dx.copy()
682  for k in range(xx.size):
683  dx[k, :] = xx[k] - xx
684  dy[k, :] = yy[k] - yy
685 
686  # convert the offsets into array indices
687  dx += (nssp - 1)
688  dy += (nssp - 1)
689 
690  # convert an offset pair (dx, dy) into an index of the 'Map' array
691  covmat = np.zeros((nsub, nsub))
692  ind = dy.flatten() + (nssp * 2 - 1) * (dx.flatten())
693  Cf = covmat.flatten()
694  Map = Map.flatten()
695  for k in range(ind.size):
696  Cf[k] = Map[ind[k]]
697 
698  return Cf.reshape((nsub, nsub))
699 
700 
701 def getCovFromMap(Map, nsub, filename=None, nssp=None, validint=None):
702  """
703  Reconstruct the full covariance matrix in the slopes space from its covariance map.
704  :parameters:
705  Map: (np.ndarray[ndim=2,dtype=np.float32]): full covariance map (XX, XY, YX, YY blocks)
706  nsub: (int): total number of slopes
707  filename: (str): (optional) path to the ROKET file
708  nssp: (int): (optional) Number of ssp in the diameter
709  validint: (float): (optional) Central obstruction as a ratio of D
710  :return:
711  covmat: (np.ndarray[ndim=2,dtype=np.float32]): reconstructed covariance matrix
712  """
713  if filename is not None:
714  f = h5py.File(filename, 'r')
715  nssp = f.attrs["_Param_wfs__nxsub"][0]
716  f.close()
717  mapSize = 2 * nssp - 1
718  covmat = np.zeros((nsub, nsub))
719 
720  covmat[:nsub // 2, :nsub // 2] = covFromMap(Map[:mapSize, :mapSize], nsub // 2,
721  filename=filename)
722  covmat[nsub // 2:, nsub // 2:] = covFromMap(Map[mapSize:, mapSize:], nsub // 2,
723  filename=filename)
724  covmat[:nsub // 2, nsub // 2:] = covFromMap(Map[:mapSize, mapSize:], nsub // 2,
725  filename=filename)
726  covmat[nsub // 2:, :nsub // 2] = covFromMap(Map[mapSize:, :mapSize], nsub // 2,
727  filename=filename)
728 
729  return covmat
730 
731 
732 def get_slopessMap(covmat, filename=None, nssp=None, validint=None):
733  """
734  Return the full spatial representation of a covariance matrix expressed in the slopes space.
735 
736  :parameters:
737  covmat: (np.ndarray[ndim=2,dtype=np.float32]): full slopes covariance matrix
738  filename: (str): (optional) path to the ROKET file
739  nssp: (int): (optional) Number of ssp in the diameter
740  validint: (float): (optional) Central obstruction as a ratio of D
741  :return:
742  Map: (np.ndarray[ndim=2,dtype=np.float32]): covariance map
743  """
744  if filename is not None:
745  f = h5py.File(filename, 'r')
746  nssp = f.attrs["_Param_wfs__nxsub"][0]
747  f.close()
748  nsub = covmat.shape[0] // 2
749  mapSize = 2 * nssp - 1
750  Map = np.zeros((2 * mapSize, 2 * mapSize))
751 
752  Map[:mapSize, :mapSize] = SlopesMap(covmat[:nsub, :nsub], filename=filename,
753  nssp=nssp, validint=validint)
754  Map[:mapSize, mapSize:] = SlopesMap(covmat[:nsub, nsub:], filename=filename,
755  nssp=nssp, validint=validint)
756  Map[mapSize:, :mapSize] = SlopesMap(covmat[nsub:, :nsub], filename=filename,
757  nssp=nssp, validint=validint)
758  Map[mapSize:, mapSize:] = SlopesMap(covmat[nsub:, nsub:], filename=filename,
759  nssp=nssp, validint=validint)
760 
761  return Map
762 
763 
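A sketch of the matrix/map round trip on the measured slopes, assuming a ROKET file at a hypothetical path and the guardians.drax import path; getCovFromMap() then rebuilds the (stationary) covariance matrix from the averaged map:

import h5py
import numpy as np

from guardians import drax  # assumed import path

fname = "roket_output.h5"  # hypothetical ROKET file
with h5py.File(fname, "r") as f:
    slopes = f["slopes"][:]
Cmm = slopes.dot(slopes.T) / slopes.shape[1]  # empirical slopes covariance matrix
Map = drax.get_slopessMap(Cmm, fname)         # XX, XY, YX, YY covariance maps
Cmm_stat = drax.getCovFromMap(Map, Cmm.shape[0], fname)  # rebuilt from the averaged map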
764 def ensquare_PSF(filename, psf, N, display=False, cmap="jet"):
765  """
766  Return the ensquared PSF
767 
768  :parameters:
769  filename: (str): path to the ROKET file
770  psf: (np.ndarray[ndim=2,dtype=np.float32]): PSF to ensquare
771  N: (int): size of the square in units of Lambda/D
772  display: (bool): (optional) if True, also displays the ensquared PSF
773  cmap: (str): (optional) matplotlib colormap to use
774  :return:
775  psf: (np.ndarray[ndim=2,dtype=np.float32]): the ensquared psf
776  """
777  f = h5py.File(filename, 'r')
778  Lambda_tar = f.attrs["_Param_target__Lambda"][0]
779  RASC = 180 / np.pi * 3600.
780  pixsize = Lambda_tar * 1e-6 / (psf.shape[0] * f.attrs["_Param_tel__diam"] / f.attrs[
781  "_Param_geom__pupdiam"]) * RASC
782  x = (np.arange(psf.shape[0]) - psf.shape[0] / 2) * pixsize / (
783  Lambda_tar * 1e-6 / f.attrs["_Param_tel__diam"] * RASC)
784  w = int(N * (Lambda_tar * 1e-6 / f.attrs["_Param_tel__diam"] * RASC) / pixsize)
785  mid = psf.shape[0] // 2
786  psfe = np.abs(psf[mid - w:mid + w, mid - w:mid + w])
787  if (display):
788  plt.matshow(np.log10(psfe), cmap=cmap)
789  plt.colorbar()
790  xt = np.linspace(0, psfe.shape[0] - 1, 6).astype(np.int32)
791  yt = np.linspace(-N, N, 6).astype(np.int32)
792  plt.xticks(xt, yt)
793  plt.yticks(xt, yt)
794 
795  f.close()
796  return psf[mid - w:mid + w, mid - w:mid + w]
797 
798 
799 def ensquared_energy(filename, psf, N):
800  """
801  Return the ensquared energy in a box of width N * lambda/D
802  :parameters:
803  filename: (str): path to the ROKET file
804  psf: (np.ndarray[ndim=2,dtype=np.float32]): PSF to analyse
805  N: (int): size of the square in units of Lambda/D
806  """
807  return ensquare_PSF(filename, psf, N).sum() / psf.sum()
808 
809 
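For example, the ensquared energy of the COMPASS PSF stored in the file can be evaluated as (hypothetical path, arbitrary box size, assumed import path):

from guardians import drax  # assumed import path

fname = "roket_output.h5"
psf = drax.get_tar_image(fname)
ee5 = drax.ensquared_energy(fname, psf, 5)  # fraction of the PSF energy within the 5 lambda/D box
print("Ensquared energy at 5 lambda/D: %.3f" % ee5)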
810 def cutsPSF(filename, psf, psfs):
811  """
812  Plots cuts of two PSFs along the X and Y axes for comparison
813  :parameters:
814  filename: (str): path to the ROKET file
815  psf: (np.ndarray[ndim=2,dtype=np.float32]): first PSF
816  psfs: (np.ndarray[ndim=2,dtype=np.float32]): second PSF
817  """
818  f = h5py.File(filename, 'r')
819  Lambda_tar = f.attrs["_Param_target__Lambda"][0]
820  RASC = 180 / np.pi * 3600.
821  pixsize = Lambda_tar * 1e-6 / (psf.shape[0] * f.attrs["_Param_tel__diam"] / f.attrs[
822  "_Param_geom__pupdiam"]) * RASC
823  x = (np.arange(psf.shape[0]) - psf.shape[0] / 2) * pixsize / (
824  Lambda_tar * 1e-6 / f.attrs["_Param_tel__diam"] * RASC)
825  plt.figure()
826  plt.subplot(2, 1, 1)
827  plt.semilogy(x, psf[psf.shape[0] // 2, :], color="blue")
828  plt.semilogy(x, psfs[psf.shape[0] // 2, :], color="red")
829  plt.semilogy(x,
830  np.abs(psf[psf.shape[0] // 2, :] - psfs[psf.shape[0] // 2, :]),
831  color="green")
832  plt.xlabel("X-axis angular distance [units of lambda/D]")
833  plt.ylabel("Normalized intensity")
834  plt.legend(["PSF exp", "PSF model", "Diff"])
835  plt.xlim(-20, 20)
836  plt.ylim(1e-7, 1)
837  plt.subplot(2, 1, 2)
838  plt.semilogy(x, psf[:, psf.shape[0] // 2], color="blue")
839  plt.semilogy(x, psfs[:, psf.shape[0] // 2], color="red")
840  plt.semilogy(x,
841  np.abs(psf[:, psf.shape[0] // 2] - psfs[:, psf.shape[0] // 2]),
842  color="green")
843  plt.xlabel("Y-axis angular distance [units of lambda/D]")
844  plt.ylabel("Normalized intensity")
845  plt.legend(["PSF exp", "PSF model", "Diff"])
846  plt.xlim(-20, 20)
847  plt.ylim(1e-7, 1)
848  f.close()
849 
850 
851 def compDerivativeCmm(filename=None, slopes=None, dt=1, dd=False, ss=False):
852  """
853  Compute d/dt(slopes)*slopes from ROKET buffer
854  :parameters:
855  filename: (str): (optional) path to the ROKET file
856  slopes: (np.ndarray[ndim=2,dtype=np.float32]): (optional) buffer of slopes arranged as (nsub x niter)
857  dt: (int): (optional) temporal lag in frames
858  dd: (bool): (optional) if True, computes d/dt(slopes)*d/dt(slopes); if ss is True instead, computes slopes(t)*slopes(t+dt)
859  :return:
860  dCmm: (np.ndarray[ndim=2,dtype=np.float32]): covariance matrix of the slopes with their temporal derivative
861  """
862  if filename is not None:
863  f = h5py.File(filename, 'r')
864  slopes = f["slopes"][:]
865  f.close()
866  if slopes is not None:
867  if dd:
868  dCmm = (slopes[:, dt:] - slopes[:, :-dt]).dot(
869  (slopes[:, dt:] - slopes[:, :-dt]).T / 2)
870  elif ss:
871  dCmm = slopes[:, :-dt].dot(slopes[:, dt:].T)
872  else:
873  dCmm = (slopes[:, dt:] - slopes[:, :-dt]).dot(
874  (slopes[:, dt:] + slopes[:, :-dt]).T / 2)
875 
876  return dCmm / slopes[:, dt:].shape[1]
877 
878 
879 def compProfile(filename, nlayers):
880  """
881  Identify turbulent parameters (wind speed, direction and frac. of r0) from ROKET file
882 
883  :parameters:
884  filename: (str): path to the ROKET file
885  nlayers: (int): number of turbulent layers (maybe deduced in the future ?)
886  """
887  f = h5py.File(filename, "r")
888  dt = f.attrs["_Param_loop__ittime"]
889  dk = int(2 / 3 * f.attrs["_Param_tel__diam"] / 20 / dt)
890  pdiam = f.attrs["_Param_tel__diam"] / f.attrs["_Param_wfs__nxsub"]
891 
892  mapC = get_slopessMap(compDerivativeCmm(filename, dt=dk), filename)
893  size = mapC.shape[0] // 2
894  minimap = mapC[size:, size:] + mapC[size:, :size] + mapC[:size,
895  size:] + mapC[:size, :size]
896 
897  ws = np.zeros(nlayers)
898  wd = np.zeros(nlayers)
899  frac = np.zeros(nlayers)
900 
901  for k in range(nlayers):
902  plt.matshow(minimap)
903  plt.title(str(k))
904  x, y = np.where(minimap == minimap.max())
905  x = int(x)
906  y = int(y)
907  print("max ", k, ": x=", x, " ; y=", y)
908  frac[k] = minimap[x, y]
909  r = np.linalg.norm([x - size / 2, y - size / 2]) * pdiam
910  ws[k] = r / (dk * dt)
911  wd[k] = np.arctan2(x - size / 2, y - size / 2) * 180 / np.pi
912  if (wd[k] < 0):
913  wd[k] += 360
914  minimap[x - 2:x + 3, y - 2:y + 3] = 0
915  minimap[(size - x - 1) - 2:(size - x - 1) + 3, (size - y - 1) - 2:
916  (size - y - 1) + 3] = 0
917  frac /= frac.sum()
918 
919  ind = np.argsort(f.attrs["_Param_atmos__frac"])[::-1]
920  print("Real wind speed: ", f.attrs["_Param_atmos__windspeed"][ind].tolist())
921  print("Estimated wind speed: ", ws.tolist())
922  print("-----------------------------")
923  print("Real wind direction: ", f.attrs["_Param_atmos__winddir"][ind].tolist())
924  print("Estimated wind direction: ", wd.tolist())
925  print("-----------------------------")
926  print("Real frac: ", f.attrs["_Param_atmos__frac"][ind].tolist())
927  print("Estimated frac: ", frac.tolist())
928  print("-----------------------------")
929  f.close()
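A minimal call sketch, assuming a two-layer atmosphere was simulated and the ROKET file sits at a hypothetical path:

from guardians import drax  # assumed import path

drax.compProfile("roket_output.h5", 2)  # prints real vs estimated wind speed, direction and fraction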