Modify cross-over (SymNet-based search)
LisIva committed Apr 18, 2024
1 parent b92db9b commit 80515b0
Showing 3 changed files with 82 additions and 10 deletions.
12 changes: 10 additions & 2 deletions Experiments on no noise data/experiment_burgers_sindy.py
@@ -96,9 +96,9 @@ def hash_term(term):
grids = np.meshgrid(t, x, indexing='ij')

''' Parameters of the experiment '''
write_csv = True
write_csv = False
print_results = True
max_iter_number = 50
max_iter_number = 1
title = 'dfs0'

terms = [('u',), ('du/dx1',), ('du/dx2',), ('d^2u/dx2^2',), ('u', 'du/dx1'), ('u', 'du/dx2'), ('u', 'd^2u/dx2^2'),
@@ -132,6 +132,14 @@ def hash_term(term):
epde_search_obj.equation_search_results(only_print=True, num=4)
time1 = end-start

# keys = list(epde_search_obj.cache[1].memory_default.keys())
# for i in range(4):
# val = epde_search_obj.cache[1].memory_default.get(keys[i])
# name = keys[i][0].replace("/", '_')
# path = os.path.join(Path().absolute().parent, "data_burg", name)
# np.save(path, val)
# print()

res = epde_search_obj.equation_search_results(only_print=False, num=4)
difference_ls = find_coeff_diff(res, coefficients)

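The commented-out block above dumps the first four cached fields from the EPDE cache to .npy files in a neighbouring data_burg folder. A minimal sketch of the same idea as a standalone helper, assuming (not verified here) that epde_search_obj.cache[1].memory_default behaves like a plain dict keyed by tuples whose first element is the token name:

import os
import numpy as np

def dump_cached_fields(memory_default: dict, out_dir: str, n_fields: int = 4) -> None:
    # Sketch of the commented-out block above: save the first n_fields cached
    # arrays as .npy files, using a filesystem-safe version of the token name.
    os.makedirs(out_dir, exist_ok=True)
    for key in list(memory_default.keys())[:n_fields]:
        name = key[0].replace("/", "_")
        np.save(os.path.join(out_dir, name), memory_default[key])

# In the experiment script this would be called as, e.g.:
# dump_cached_fields(epde_search_obj.cache[1].memory_default, "../data_burg")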
50 changes: 42 additions & 8 deletions epde/operators/multiobjective/variation.py
@@ -22,6 +22,9 @@
from epde.operators.utils.template import CompoundOperator, add_base_param_to_operator
from epde.operators.multiobjective.moeadd_specific import get_basic_populator_updater
from epde.operators.multiobjective.mutations import get_basic_mutation
from sympy import Mul, Symbol
import epde.globals as global_var
from symnet.preproc_output import get_cross_distr, to_symbolic


class ParetoLevelsCrossover(CompoundOperator):
@@ -161,17 +164,49 @@ def apply(self, objective : tuple, arguments : dict):
check_uniqueness(temp_term_2, objective[1].structure[:i] + objective[1].structure[i+1:])):
objective[0].structure[i] = temp_term_1; objective[1].structure[i] = temp_term_2

for i in range(same_num + similar_num, len(objective[0].structure)):
if check_uniqueness(objective[0].structure[i], objective[1].structure) and check_uniqueness(objective[1].structure[i], objective[0].structure):
objective[0].structure[i], objective[1].structure[i] = self.suboperators['term_crossover'].apply(objective = (objective[0].structure[i],
objective[1].structure[i]),
arguments = subop_args['term_crossover'])
# CHECK OUT HERE!!!
if same_num + similar_num < len(objective[0].structure):
start_idx = same_num + similar_num
eq1_distr = self.get_equation_cross_distr(objective[0], start_idx)
eq2_distr = self.get_equation_cross_distr(objective[1], start_idx)
for _ in range(len(eq1_distr)):
idx0 = np.random.choice(list(eq1_distr.keys()), p=list(eq1_distr.values()))
idx1 = np.random.choice(list(eq2_distr.keys()), p=list(eq2_distr.values()))

if check_uniqueness(objective[0].structure[idx0], objective[1].structure) and check_uniqueness(
objective[1].structure[idx1], objective[0].structure):

objective[0].structure[idx0], objective[1].structure[idx1], recalc_distr = self.suboperators['term_crossover'].apply(
objective=(objective[0].structure[idx0],objective[1].structure[idx1]),
arguments=subop_args['term_crossover'])

if recalc_distr:
eq1_distr = self.get_equation_cross_distr(objective[0], start_idx)
eq2_distr = self.get_equation_cross_distr(objective[1], start_idx)


# for i in range(same_num + similar_num, len(objective[0].structure)):
# if check_uniqueness(objective[0].structure[i], objective[1].structure) and check_uniqueness(objective[1].structure[i], objective[0].structure):
# objective[0].structure[i], objective[1].structure[i] = self.suboperators['term_crossover'].apply(objective = (objective[0].structure[i],
# objective[1].structure[i]),
# arguments = subop_args['term_crossover'])

return objective[0], objective[1]

def use_default_tags(self):
self._tags = {'crossover', 'gene level', 'contains suboperators', 'standard'}


def get_equation_cross_distr(self, equation, start_idx):
importance_coeffs = {}
for i in range(start_idx, len(equation.structure)):
sym_term = to_symbolic(equation.structure[i])
importance_coeffs[sym_term] = global_var.sympool.pool_dict.get(sym_term)
cross_distr = get_cross_distr(importance_coeffs, start_idx, len(equation.structure))
return cross_distr



class EquationExchangeCrossover(CompoundOperator):
key = 'EquationExchangeCrossover'

@@ -286,12 +321,11 @@ def apply(self, objective : tuple, arguments : dict):
"""
self_args, subop_args = self.parse_suboperator_args(arguments = arguments)

if (np.random.uniform(0, 1) <= self.params['crossover_probability'] and
objective[1].descr_variable_marker == objective[0].descr_variable_marker):
return objective[1], objective[0]
return objective[1], objective[0], True
else:
return objective[0], objective[1]
return objective[0], objective[1], False

def use_default_tags(self):
self._tags = {'crossover', 'term level', 'exploration', 'no suboperators', 'standard'}
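In short, the hunk above replaces the old position-by-position term crossover with sampling: for each parent equation, get_equation_cross_distr builds a probability distribution over the non-shared term slots from the importance coefficients stored in global_var.sympool (via get_cross_distr in symnet/preproc_output.py, shown below), positions are drawn from those distributions, and both distributions are rebuilt whenever term_crossover reports through its new third return value that an exchange actually took place. A simplified, runnable sketch of just the sampling step, with made-up distributions standing in for get_equation_cross_distr output:

import numpy as np

# Hypothetical index -> probability maps over the non-shared term slots of two
# parent equations (in the operator these come from get_equation_cross_distr).
eq1_distr = {2: 0.20, 3: 0.47, 4: 0.33}
eq2_distr = {2: 0.60, 3: 0.10, 4: 0.30}

for _ in range(len(eq1_distr)):
    # Draw one candidate position from each parent according to its distribution.
    idx0 = np.random.choice(list(eq1_distr.keys()), p=list(eq1_distr.values()))
    idx1 = np.random.choice(list(eq2_distr.keys()), p=list(eq2_distr.values()))
    print(f"candidate crossover positions: {idx0} (parent 1), {idx1} (parent 2)")
    # In variation.py the terms at idx0/idx1 are exchanged only if uniqueness is
    # preserved, and the distributions are recomputed after a successful exchange.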
30 changes: 30 additions & 0 deletions symnet/preproc_output.py
@@ -44,3 +44,33 @@ def cast_to_symbols(pool_names: list[tuple[str]]):
term_symbolic = list(map(lambda u: Symbol(u), name))
pool_ls.append(Mul(*term_symbolic))
return pool_ls


def to_symbolic(term):
if type(term.cache_label[0]) == tuple:
labels = []
for label in term.cache_label:
labels.append(str(label[0]))
symlabels = list(map(lambda token: Symbol(token), labels))
return Mul(*symlabels)
else:
return Symbol(str(term.cache_label[0]))


def get_cross_distr(custom_cross_prob, start_idx, end_idx_exclude):
mmf = 2.4
values = list(custom_cross_prob.values())
csym_arr = np.fabs(np.array(values))

if np.max(csym_arr) / np.min(csym_arr) > 2.6:
min_max_coeff = mmf * np.min(csym_arr) - np.max(csym_arr)
smoothing_factor = min_max_coeff / (min_max_coeff - (mmf - 1) * np.average(csym_arr))
uniform_csym = np.array([np.sum(csym_arr) / len(csym_arr)] * len(csym_arr))

smoothed_array = (1 - smoothing_factor) * csym_arr + smoothing_factor * uniform_csym
inv = 1 / smoothed_array
else:
inv = 1 / csym_arr
inv_norm = inv / np.sum(inv)

return dict(zip([i for i in range(start_idx, end_idx_exclude)], inv_norm.tolist()))
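For intuition, a small hand-run of get_cross_distr with made-up importance values (the keys mirror what to_symbolic would produce, though only the values are used): because the returned probabilities are the normalised inverses of the, possibly smoothed, coefficient magnitudes, the term with the smallest importance becomes the most likely crossover candidate.

from sympy import Mul, Symbol

from symnet.preproc_output import get_cross_distr

# Hypothetical importance coefficients for three non-shared terms occupying
# structure indices 2, 3 and 4 of an equation.
importance = {
    Symbol('du/dx1'): 0.9,
    Mul(Symbol('u'), Symbol('du/dx2')): 0.05,
    Symbol('d^2u/dx2^2'): 0.3,
}

distr = get_cross_distr(importance, start_idx=2, end_idx_exclude=5)
# max/min = 18 > 2.6, so the coefficients are first smoothed towards their mean;
# the result is roughly {2: 0.20, 3: 0.47, 4: 0.33} -- the 0.05 term is the
# most likely to be replaced during crossover.
print(distr)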
