feat: removed attribute is_merged.

Fabian Joswig 2023-01-06 13:34:26 +01:00
parent b708411830
commit a53fb18821
6 changed files with 2 additions and 28 deletions
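
The hunks below drop is_merged from the JSON serialization, from the Obs class itself, and from the helpers that propagated it. For orientation, here is a minimal standalone sketch of the propagation rule that derived_observable removes in this commit; plain dicts stand in for Obs attributes, and the name combine_is_merged is illustrative, not part of pyerrors:

# Hedged sketch of the removed per-replica flag propagation: a derived
# observable was marked "merged" for a replica whenever any input Obs
# carried that flag there (see the derived_observable hunk below).
# Plain-dict stand-in, not the pyerrors API.
def combine_is_merged(input_flags, sample_names):
    """input_flags: one dict per input Obs, mapping replica name -> bool."""
    return {name: any(flags.get(name, False) for flags in input_flags)
            for name in sample_names}

print(combine_is_merged([{"A|r01": True}, {"A|r01": False, "B|r01": False}],
                        ["A|r01", "B|r01"]))
# {'A|r01': True, 'B|r01': False}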

View file

@@ -41,8 +41,6 @@ def create_json_string(ol, description='', indent=1):
for r_name in ol[0].e_content[name]:
rd = {}
rd['name'] = r_name
- if ol[0].is_merged.get(r_name, False):
-     rd['is_merged'] = True
rd['deltas'] = []
offsets = [o.r_values[r_name] - o.value for o in ol]
deltas = np.column_stack([ol[oi].deltas[r_name] + offsets[oi] for oi in range(No)])
@@ -138,7 +136,6 @@ def create_json_string(ol, description='', indent=1):
for name in obs._covobs:
my_obs.names.append(name)
my_obs.reweighted = obs.reweighted
- my_obs.is_merged = obs.is_merged
return my_obs
def write_Corr_to_dict(my_corr):
@@ -258,7 +255,6 @@ def _parse_json_dict(json_dict, verbose=True, full_output=False):
retd['names'] = []
retd['idl'] = []
retd['deltas'] = []
- retd['is_merged'] = {}
for ens in d:
for rep in ens['replica']:
rep_name = rep['name']
@@ -270,7 +266,6 @@ def _parse_json_dict(json_dict, verbose=True, full_output=False):
retd['names'].append(rep_name)
retd['idl'].append([di[0] for di in rep['deltas']])
retd['deltas'].append(np.array([di[1:] for di in rep['deltas']]))
- retd['is_merged'][rep_name] = rep.get('is_merged', False)
return retd
def _gen_covobsd_from_cdatad(d):
@@ -300,7 +295,6 @@ def _parse_json_dict(json_dict, verbose=True, full_output=False):
if od:
ret = Obs([[ddi[0] + values[0] for ddi in di] for di in od['deltas']], od['names'], idl=od['idl'])
ret._value = values[0]
- ret.is_merged = od['is_merged']
else:
ret = Obs([], [], means=[])
ret._value = values[0]
@@ -326,7 +320,6 @@ def _parse_json_dict(json_dict, verbose=True, full_output=False):
if od:
ret.append(Obs([list(di[:, i] + values[i]) for di in od['deltas']], od['names'], idl=od['idl']))
ret[-1]._value = values[i]
- ret[-1].is_merged = od['is_merged']
else:
ret.append(Obs([], [], means=[]))
ret[-1]._value = values[i]
@@ -354,7 +347,6 @@ def _parse_json_dict(json_dict, verbose=True, full_output=False):
if od:
ret.append(Obs([di[:, i] + values[i] for di in od['deltas']], od['names'], idl=od['idl']))
ret[-1]._value = values[i]
- ret[-1].is_merged = od['is_merged']
else:
ret.append(Obs([], [], means=[]))
ret[-1]._value = values[i]

View file

@@ -932,7 +932,6 @@ def qtop_projection(qtop, target=0):
proj_qtop.append(np.array([1 if round(qtop.r_values[n] + q) == target else 0 for q in qtop.deltas[n]]))
reto = Obs(proj_qtop, qtop.names, idl=[qtop.idl[name] for name in qtop.names])
- reto.is_merged = qtop.is_merged
return reto

View file

@@ -109,7 +109,7 @@ def _assert_equal_properties(ol, otype=Obs):
for o in ol[1:]:
if not isinstance(o, otype):
raise Exception("Wrong data type in list.")
for attr in ["is_merged", "reweighted", "e_content", "idl"]:
for attr in ["reweighted", "e_content", "idl"]:
if hasattr(ol[0], attr):
if not getattr(ol[0], attr) == getattr(o, attr):
raise Exception(f"All Obs in list have to have the same state '{attr}'.")

View file

@@ -49,7 +49,7 @@ class Obs:
'ddvalue', 'reweighted', 'S', 'tau_exp', 'N_sigma',
'e_dvalue', 'e_ddvalue', 'e_tauint', 'e_dtauint',
'e_windowsize', 'e_rho', 'e_drho', 'e_n_tauint', 'e_n_dtauint',
- 'idl', 'is_merged', 'tag', '_covobs', '__dict__']
+ 'idl', 'tag', '_covobs', '__dict__']
S_global = 2.0
S_dict = {}
@@ -97,7 +97,6 @@ class Obs:
self._value = 0
self.N = 0
- self.is_merged = {}
self.idl = {}
if idl is not None:
for name, idx in sorted(zip(names, idl)):
@@ -1153,7 +1152,6 @@ def derived_observable(func, data, array_mode=False, **kwargs):
new_cov_names = sorted(set([y for x in [o.cov_names for o in raveled_data] for y in x]))
new_sample_names = sorted(set(new_names) - set(new_cov_names))
- is_merged = {name: (len(list(filter(lambda o: o.is_merged.get(name, False) is True, raveled_data))) > 0) for name in new_sample_names}
reweighted = len(list(filter(lambda o: o.reweighted is True, raveled_data))) > 0
if data.ndim == 1:
@@ -1179,8 +1177,6 @@ def derived_observable(func, data, array_mode=False, **kwargs):
tmp_values = np.array(tmp_values).reshape(data.shape)
new_r_values[name] = func(tmp_values, **kwargs)
new_idl_d[name] = _merge_idx(idl)
- if not is_merged[name]:
-     is_merged[name] = (1 != len(set([len(idx) for idx in [*idl, new_idl_d[name]]])))
if 'man_grad' in kwargs:
deriv = np.asarray(kwargs.get('man_grad'))
@@ -1266,7 +1262,6 @@ def derived_observable(func, data, array_mode=False, **kwargs):
final_result[i_val].names.append(name)
final_result[i_val]._covobs = new_covobs
final_result[i_val]._value = new_val
- final_result[i_val].is_merged = is_merged
final_result[i_val].reweighted = reweighted
if multi == 0:
@@ -1347,7 +1342,6 @@ def reweight(weight, obs, **kwargs):
result.append(tmp_obs / new_weight)
result[-1].reweighted = True
- result[-1].is_merged = obs[i].is_merged
return result
@@ -1391,7 +1385,6 @@ def correlate(obs_a, obs_b):
new_idl.append(obs_a.idl[name])
o = Obs(new_samples, sorted(obs_a.names), idl=new_idl)
- o.is_merged = {name: (obs_a.is_merged.get(name, False) or obs_b.is_merged.get(name, False)) for name in o.names}
o.reweighted = obs_a.reweighted or obs_b.reweighted
return o
@@ -1589,7 +1582,6 @@ def merge_obs(list_of_obs):
names = sorted(new_dict.keys())
o = Obs([new_dict[name] for name in names], names, idl=[idl_dict[name] for name in names])
- o.is_merged = {name: np.any([oi.is_merged.get(name, False) for oi in list_of_obs]) for name in o.names}
o.reweighted = np.max([oi.reweighted for oi in list_of_obs])
return o

View file

@@ -404,11 +404,4 @@ def assert_equal_Obs(to, ro):
if not np.allclose(v, v2, atol=1e-14):
print(kw, "does not match.")
return False
- m_to = getattr(to, "is_merged")
- m_ro = getattr(ro, "is_merged")
- if not m_to == m_ro:
-     if not (all(value is False for value in m_ro.values()) and all(value is False for value in m_to.values())):
-         print("is_merged", "does not match.")
-         return False
return True

View file

@@ -201,8 +201,6 @@ def test_matmul_irregular_histories():
t2 = pe.linalg.matmul(standard_matrix, irregular_matrix)
assert np.all([o.is_zero() for o in (t1 - t2).ravel()])
- assert np.all([o.is_merged for o in t1.ravel()])
- assert np.all([o.is_merged for o in t2.ravel()])
def test_irregular_matrix_inverse():