import numpy as np

from carmaWrap import obj_float, obj_double, obj_int, obj_float_complex, obj_double_complex, obj_uint16
from carmaWrap import obj_half
        return repr(self.value)


class Array:
    """GPU-backed array wrapping a carmaWrap obj."""

    def __init__(self, data=None, shape=None, dtype=None):
        if data is not None or shape is not None:
            if data is None:
                if isinstance(shape, tuple):
                    data = np.zeros(shape, dtype=dtype)
                else:
                    raise TypeError("shape must be a tuple")
            if isinstance(data, list):
                data = np.array(data)
            if isinstance(data, np.ndarray):
                if dtype is not None:
                    data = data.astype(dtype)
                if data.dtype == np.int64 or data.dtype == np.int32:
                    self.__data = obj_int(context.context, data)
                elif data.dtype == np.float32:
                    self.__data = obj_float(context.context, data)
                elif data.dtype == np.float64:
                    self.__data = obj_double(context.context, data)
                elif USE_HALF and data.dtype == np.float16:
                    self.__data = obj_half(context.context, data)
                elif data.dtype == np.complex64:
                    self.__data = obj_float_complex(context.context, data)
                elif data.dtype == np.complex128:
                    self.__data = obj_double_complex(context.context, data)
                else:
                    raise TypeError("Data type not implemented")
                self.__shape = data.shape
                self.__dtype = data.dtype
            elif isinstance(data, obj_int):
                self.__data = data
                self.__dtype = np.int32
                self.__shape = tuple(data.shape)
            elif isinstance(data, obj_float):
                self.__data = data
                self.__dtype = np.float32
                self.__shape = tuple(data.shape)
            elif isinstance(data, obj_double):
                self.__data = data
                self.__dtype = np.float64
                self.__shape = tuple(data.shape)
            elif USE_HALF and isinstance(data, obj_half):
                self.__data = data
                self.__dtype = np.float16
                self.__shape = tuple(data.shape)
            elif isinstance(data, obj_float_complex):
                self.__data = data
                self.__dtype = np.complex64
                self.__shape = tuple(data.shape)
            elif isinstance(data, obj_double_complex):
                self.__data = data
                self.__dtype = np.complex128
                self.__shape = tuple(data.shape)
            elif isinstance(data, obj_uint16):
                self.__data = data
                self.__dtype = np.uint16
                self.__shape = tuple(data.shape)
            else:
                raise TypeError("Data must be a list, a numpy array or a carmaWrap.obj")
        else:
            raise AttributeError("You must provide data or shape at least")

    shape = property(lambda x: x.__shape)
    dtype = property(lambda x: x.__dtype)
    data = property(lambda x: x.__data)

    def __add__(self, idata):
        tmp = self.copy()
        if isinstance(idata, Array):
            tmp.data.axpy(1, idata.data)
        elif isinstance(idata, np.ndarray):
            tmp.data.axpy(1, Array(idata).data)
        else:
            raise TypeError("operator + is defined only between Arrays and np.arrays")
        return tmp

    def __sub__(self, idata):
        tmp = self.copy()
        if isinstance(idata, Array):
            tmp.data.axpy(-1, idata.data)
        elif isinstance(idata, np.ndarray):
            tmp.data.axpy(-1, Array(idata).data)
        else:
            raise TypeError("operator - is defined only between Arrays and np.arrays")
        return tmp

    def __mul__(self, idata):
        if isinstance(idata, (int, float)):
            tmp = self.copy()
            tmp.data.scale(idata)
            return tmp
        else:
            raise NotImplementedError("Operator not implemented yet")

    def copy(self):
        tmp = Array(shape=self.shape, dtype=self.dtype)
        tmp.data.copy_from(self.data)
        return tmp

    def dot(self, idata):
        if isinstance(idata, np.ndarray):
            if idata.dtype == self.dtype:
                idata = Array(idata)
            else:
                raise TypeError("Data types must be the same for both arrays")
        if isinstance(idata, Array):
            if len(self.shape) == 1:
                if len(idata.shape) == 1:
                    if idata.shape == self.shape:
                        result = self.data.dot(idata.data, 1, 1)
                    else:
                        raise ValueError("Vectors must have the same shape")
                elif len(idata.shape) == 2:
                    if idata.shape[0] == self.shape[0]:
                        result = Array(idata.data.gemv(self.data, op='T'))
                    else:
                        raise ValueError("Shapes are not aligned")
            elif len(self.shape) == 2:
                if len(idata.shape) == 1:
                    if idata.shape[0] == self.shape[1]:
                        result = Array(self.data.gemv(idata.data))
                    else:
                        raise ValueError("Shapes are not aligned")
                elif len(idata.shape) == 2:
                    if self.shape[1] == idata.shape[0]:
                        result = Array(self.data.gemm(idata.data))
                    else:
                        raise ValueError("Shapes are not aligned")
        return result

    def argmax(self):
        return self.data.aimax()

    def argmin(self):
        return self.data.aimin()

    def toarray(self):
        tmp = np.array(self.data)
        return tmp


def ones(shape, dtype=np.float32):
    return Array(np.ones(shape, dtype=dtype))


def zeros(shape, dtype=np.float32):
    return Array(np.zeros(shape, dtype=dtype))
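

# Example usage: a minimal, illustrative sketch rather than part of the module
# itself. It assumes a CUDA-capable GPU is available and that the module-level
# `context` object used in Array.__init__ has already been initialised by
# carmaWrap; the values below are arbitrary.
if __name__ == "__main__":
    a = Array(np.arange(6, dtype=np.float32).reshape(2, 3))
    b = ones((2, 3), dtype=np.float32)
    c = a + b                       # element-wise addition on the GPU (axpy)
    print(c.shape, c.dtype)
    print(np.array(c.data))         # copy the result back to the host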