Skip to content
Snippets Groups Projects
Commit a7032fc7 authored by Mads M. Pedersen's avatar Mads M. Pedersen
Browse files

Replace the lambda function in the list indexer and remove the xr.Dataset attribute (neither can be pickled)

parent acc3bcae
No related branches found
No related tags found
No related merge requests found
......@@ -186,6 +186,14 @@ class FugaBlockage(All2AllIterative):
turbulenceModel=turbulenceModel, convergence_tolerance=convergence_tolerance)
class list_indexer:
    """Picklable callable mapping values to indices of a sorted sequence.

    Calling an instance with ``x`` clips ``x`` to the last element of the
    wrapped sequence and returns the ``np.searchsorted`` position(s), so
    out-of-range values map to the last valid index instead of ``len(lst)``.
    Implemented as a class rather than a lambda/closure so that instances
    can be pickled (e.g. for multiprocessing).
    """

    def __init__(self, lst):
        # Sorted lookup sequence; stored as-is (not copied).
        self.lst = lst

    def __call__(self, x):
        # Clip to the upper bound first, then locate the insertion point.
        upper_bound = self.lst[-1]
        clipped = np.minimum(x, upper_bound)
        return np.searchsorted(self.lst, clipped)
class FugaMultiLUTDeficit(FugaDeficit):
def __init__(self, LUT_path_lst=tfp + 'fuga/*.nc', remove_wriggles=False,
method='linear', rotorAvgModel=None, groundModel=None):
......@@ -199,37 +207,38 @@ class FugaMultiLUTDeficit(FugaDeficit):
return ds
if isinstance(LUT_path_lst, str):
self.ds_lst = [open_dataset(f) for f in glob.glob(LUT_path_lst)]
ds_lst = [open_dataset(f) for f in glob.glob(LUT_path_lst)]
else:
self.ds_lst = [open_dataset(f) for f in LUT_path_lst]
ds_lst = [open_dataset(f) for f in LUT_path_lst]
x_lst, y_lst, z_lst = [self.ds_lst[0][k].values for k in 'xyz']
assert np.all([np.all(ds.x == self.ds_lst[0].x) for ds in self.ds_lst])
assert np.all([np.all(ds.y == self.ds_lst[0].y) for ds in self.ds_lst])
assert np.all([np.all(ds.z == self.ds_lst[0].z) for ds in self.ds_lst])
assert np.all([np.all(ds.z0 == self.ds_lst[0].z0) for ds in self.ds_lst])
assert np.all([np.all(ds.zeta0 == self.ds_lst[0].zeta0) for ds in self.ds_lst])
self.z0 = self.ds_lst[0].z0.item()
self.zeta0 = self.ds_lst[0].zeta0.item()
x_lst, y_lst, z_lst = [ds_lst[0][k].values for k in 'xyz']
assert np.all([np.all(ds.x == ds_lst[0].x) for ds in ds_lst])
assert np.all([np.all(ds.y == ds_lst[0].y) for ds in ds_lst])
assert np.all([np.all(ds.z == ds_lst[0].z) for ds in ds_lst])
assert np.all([np.all(ds.z0 == ds_lst[0].z0) for ds in ds_lst])
assert np.all([np.all(ds.zeta0 == ds_lst[0].zeta0) for ds in ds_lst])
self.x = ds_lst[0].x.values
self.y = ds_lst[0].y.values
self.z = ds_lst[0].z.values
self.z0 = ds_lst[0].z0.item()
self.zeta0 = ds_lst[0].zeta0.item()
data = np.concatenate([self.init_lut(ds.UL.values, ds.hubheight.item(), remove_wriggles=remove_wriggles)[na]
for ds in self.ds_lst], 0)
for ds in ds_lst], 0)
i_lst = np.arange(len(self.ds_lst))
i_lst = np.arange(len(ds_lst))
self.interpolator = GridInterpolator([i_lst, z_lst, y_lst, x_lst], data,
method=['nearest', 'linear', 'linear', 'linear'])
def list_indexer(lst):
return lambda x, lst=lst: np.searchsorted(lst, np.minimum(x, lst[-1]))
d_lst = np.sort(np.unique([ds.diameter.item() for ds in self.ds_lst]))
h_lst = np.sort(np.unique([ds.hubheight.item() for ds in self.ds_lst]))
# ti_lst = np.sort(np.unique([ds.TI.item() for ds in self.ds_lst]))
d_lst = np.sort(np.unique([ds.diameter.item() for ds in ds_lst]))
h_lst = np.sort(np.unique([ds.hubheight.item() for ds in ds_lst]))
# ti_lst = np.sort(np.unique([ds.TI.item() for ds in ds_lst]))
d_index, h_index = [list_indexer(lst) for lst in [d_lst, h_lst]]
# ti_searcher =
index_arr = np.full((len(d_lst), len(h_lst)), -1)
for i, ds in enumerate(self.ds_lst):
for i, ds in enumerate(ds_lst):
index_arr[d_index(ds.diameter.item()), h_index(ds.hubheight.item())] = i
self.index_arr = index_arr
self.d_index, self.h_index = d_index, h_index
......
......@@ -349,7 +349,7 @@ def test_FugaMultiLUTDeficit(LUT_path_lst):
y = x * 0
sim_res = wfm(x, y, type=[0, 1], wd=[90, 240, 270])
# sim_res = wfm([0], [0], type=[0], wd=[90, 240, 270])
z = deficitModel.ds_lst[0].z
z = deficitModel.z
if 0:
print(np.round(sim_res.flow_map(grid=XZGrid(y=0, x=[200], z=z), wd=[90, 270]).WS_eff.squeeze()[::2], 2).T)
......@@ -367,6 +367,37 @@ def test_FugaMultiLUTDeficit(LUT_path_lst):
8.66, 9.48, 10.59, 11.42, 11.84, 11.99, 12.02, 12.02, 12.01]], 2)
def test_FugaMultiLUTDeficit(LUT_path_lst):
    """Regression test: FugaMultiLUTDeficit used with n_cpu=2.

    Running with ``n_cpu=2`` forces the wind-farm model (and hence the
    deficit model) through multiprocessing, which requires the model to be
    picklable — presumably the scenario fixed by this commit (lambda and
    xr.Dataset attributes replaced); confirm against the commit message.
    The expected WS_eff profiles below are pinned regression values.
    """
    site = UniformSite()
    # Two turbine types with different rotor diameter and hub height, so
    # the multi-LUT (diameter/hub-height indexed) lookup is exercised.
    wt = WindTurbine.from_WindTurbine_lst([
        WindTurbine(name='WT80_70', diameter=80, hub_height=70,
                    powerCtFunction=CubePowerSimpleCt(power_rated=2000)),
        WindTurbine(name="WT120_90", diameter=120, hub_height=90,
                    powerCtFunction=CubePowerSimpleCt(power_rated=4500))])
    deficitModel = FugaMultiLUTDeficit()
    # Same model instance is used for both wake and blockage deficits.
    wfm = All2AllIterative(site, wt, deficitModel,
                           blockage_deficitModel=deficitModel,
                           initialize_with_PropagateDownwind=False)
    x = np.arange(2) * 500  # two turbines, 500 m apart along x
    y = x * 0
    # n_cpu=2 -> simulation runs in parallel worker processes.
    sim_res = wfm(x, y, type=[0, 1], wd=[90, 240, 270], n_cpu=2)
    # sim_res = wfm([0], [0], type=[0], wd=[90, 240, 270])
    z = deficitModel.z
    if 0:
        # Debug/plotting branch, intentionally disabled.
        print(np.round(sim_res.flow_map(grid=XZGrid(y=0, x=[200], z=z), wd=[90, 270]).WS_eff.squeeze()[::2], 2).T)
        ax = plt.gca()
        for wd in [90, 270]:
            plt.figure()
            sim_res.flow_map(grid=XZGrid(y=0, x=np.linspace(-200, 1000), z=z), wd=wd).plot_wake_map()
            sim_res.flow_map(grid=XZGrid(y=0, x=[200], z=z), wd=wd).WS_eff[::2].plot(marker='.', ax=ax, y='h')
        plt.show()
    # Vertical WS_eff profile at x=200 m (every 2nd z-level), one column
    # per wind direction; compared to pinned reference values (2 decimals).
    uz = sim_res.flow_map(grid=XZGrid(y=0, x=[200], z=z), wd=[90, 270]).WS_eff.squeeze()[::2].T
    npt.assert_array_almost_equal(uz, [
        [10.05, 9.59, 9.09, 8.62, 8.2, 7.81, 7.51, 7.29, 7.15, 7.08, 7.09,
         7.19, 7.42, 7.85, 8.46, 9.3, 10.25, 11.09, 11.65, 11.93],
        [10.72, 10.14, 9.5, 8.94, 8.51, 8.16, 7.95, 7.84, 7.83, 7.93, 8.18,
         8.66, 9.48, 10.59, 11.42, 11.84, 11.99, 12.02, 12.02, 12.01]], 2)
# def cmp_fuga_with_colonel():
# from py_wake.aep_calculator import AEPCalculator
#
......
......@@ -98,7 +98,8 @@ After that pass the new netcdf file to Fuga instead of the old folder""",
self.z = self.z0 * np.exp(self.zlevels * self.ds)
else:
self.dataset = ds = xr.open_dataset(path)
ds = xr.open_dataset(path)
self.dataset_path = path
self.x, self.y, self.z = ds.x.values, ds.y.values, ds.z.values
self.dx, self.dy = np.diff(self.x[:2]), np.diff(self.y[:2])
self.zeta0, self.zHub, self.z0 = ds.zeta0.item(), ds.hubheight.item(), ds.z0.item()
......@@ -109,17 +110,18 @@ After that pass the new netcdf file to Fuga instead of the old folder""",
return np.concatenate([((1, -1)[anti_symmetric]) * x[::-1], x[1:]])
def lut_exists(self, zlevels=None):
    """Return which look-up tables (LUTs) are available.

    Parameters
    ----------
    zlevels : iterable, optional
        z-levels whose .dat files must all exist (defaults to
        ``self.zlevels``). Only used in the .dat-file branch; ignored when
        the LUTs come from a netcdf file.

    Returns
    -------
    list or set
        The keys among 'UL', 'UT', 'VL', 'VT', 'WL', 'WT', 'PL', 'PT' that
        are present. NOTE(review): the netcdf branch returns a list while
        the .dat branch returns a set — callers should not rely on the
        container type.
    """
    if hasattr(self, 'dataset_path'):
        # Open the dataset only for the membership check and close it again.
        # The original inline `k in xr.open_dataset(...)` left the file
        # handle open; a with-block releases it (and a kept-open Dataset
        # attribute could not be pickled, cf. dataset_path rationale).
        with xr.open_dataset(self.dataset_path) as ds:
            return [k for k in ['UL', 'UT', 'VL', 'VT', 'WL', 'WT', 'PL', 'PT']
                    if k in ds]
    else:
        # Legacy folder layout: a LUT exists if the .dat file for every
        # requested z-level is present.
        return {uvwp_lt for uvwp_lt in ['UL', 'UT', 'VL', 'VT', 'WL', 'WT', 'PL', 'PT']
                if np.all([(self.path / (self.prefix + '%04d%s.dat' % (j, uvwp_lt))).exists()
                           for j in (zlevels or self.zlevels)])}
def load_luts(self, UVLT=['UL', 'UT', 'VL', 'VT'], zlevels=None):
if hasattr(self, 'dataset'):
return np.array([self.dataset[k].load().transpose('z', 'y', 'x').values for k in UVLT])
if hasattr(self, 'dataset_path'):
dataset = xr.load_dataset(self.dataset_path)
return np.array([dataset[k].load().transpose('z', 'y', 'x').values for k in UVLT])
else:
luts = np.array([[np.fromfile(str(self.path / (self.prefix + '%04d%s.dat' % (j, uvlt))), np.dtype('<f'), -1)
for j in (zlevels or self.zlevels)] for uvlt in UVLT])
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment